diff --git a/scripts/build-docs.test.ts b/scripts/build-docs.test.ts
index 4a467410c4..4fda1cccd3 100644
--- a/scripts/build-docs.test.ts
+++ b/scripts/build-docs.test.ts
@@ -167,7 +167,6 @@ const baseConfig = {
   },
   flags: {
     skipGit: true,
-    clean: true,
     skipApiErrors: true,
   },
 } satisfies Partial<Parameters<typeof createConfig>[0]>
@@ -205,7 +204,6 @@ Testing with a simple page.`,
       validSdks: ['nextjs', 'react'],
       flags: {
         skipGit: false,
-        clean: true,
         skipApiErrors: true,
       },
     }),
@@ -3263,10 +3261,232 @@ title: Updated Title
     // Check updated content
     const updatedContent = await readFile(pathJoin('./dist/cached-doc.mdx'))
+    expect(updatedContent).toContain('Updated Title')
     expect(updatedContent).toContain('Updated Content')
   })

+  test('should invalidate linked pages when the markdown changes', async () => {
+    const { tempDir, pathJoin } = await createTempFiles([
+      {
+        path: './docs/manifest.json',
+        content: JSON.stringify({
+          navigation: [
+            [
+              { title: 'Cached Doc', href: '/docs/cached-doc' },
+              { title: 'Linked Doc', href: '/docs/linked-doc' },
+            ],
+          ],
+        }),
+      },
+      {
+        path: './docs/cached-doc.mdx',
+        content: `---
+title: Original Title
+---
+
+[Link to Linked Doc](/docs/linked-doc)`,
+      },
+      {
+        path: './docs/linked-doc.mdx',
+        content: `---
+title: Linked Doc
+sdk: react, nextjs
+---
+
+# Linked Doc`,
+      },
+    ])
+
+    // Create store to maintain cache across builds
+    const store = createBlankStore()
+    const config = await createConfig({
+      ...baseConfig,
+      basePath: tempDir,
+      validSdks: ['react', 'nextjs', 'astro'],
+    })
+    const invalidate = invalidateFile(store, config)
+
+    // First build
+    await build(config, store)
+
+    expect(await readFile(pathJoin('./dist/cached-doc.mdx'))).toContain(
+      'Link to Linked Doc',
+    )
+
+    // Update file content
+    await fs.writeFile(
+      pathJoin('./docs/linked-doc.mdx'),
+      `---
+title: Linked Doc
+sdk: react, nextjs, astro
+---
+
+# Linked Doc`,
+      'utf-8',
+    )
+
+    invalidate(pathJoin('./docs/linked-doc.mdx'))
+
+    // Second build with same store (should detect changes)
+    await build(config, store)
+
+    // Check updated content
+    expect(await readFile(pathJoin('./dist/cached-doc.mdx'))).toContain(
+      'Link to Linked Doc',
+    )
+  })
+
+  test('should invalidate linked pages when the partial changes', async () => {
+    const { tempDir, pathJoin } = await createTempFiles([
+      {
+        path: './docs/manifest.json',
+        content: JSON.stringify({
+          navigation: [
+            [
+              { title: 'Cached Doc', href: '/docs/cached-doc' },
+              { title: 'Linked Doc', href: '/docs/linked-doc' },
+            ],
+          ],
+        }),
+      },
+      {
+        path: './docs/_partials/partial.mdx',
+        content: `[Link to Linked Doc](/docs/linked-doc)`,
+      },
+      {
+        path: './docs/cached-doc.mdx',
+        content: `---
+title: Original Title
+---
+
+<Include src="_partials/partial" />`,
+      },
+      {
+        path: './docs/linked-doc.mdx',
+        content: `---
+title: Linked Doc
+sdk: react, nextjs
+---
+
+# Linked Doc`,
+      },
+    ])
+
+    // Create store to maintain cache across builds
+    const store = createBlankStore()
+    const config = await createConfig({
+      ...baseConfig,
+      basePath: tempDir,
+      validSdks: ['react', 'nextjs', 'astro'],
+    })
+    const invalidate = invalidateFile(store, config)
+
+    // First build
+    await build(config, store)
+
+    expect(await readFile(pathJoin('./dist/cached-doc.mdx'))).toContain(
+      'Link to Linked Doc',
+    )
+
+    // Update file content
+    await fs.writeFile(
+      pathJoin('./docs/linked-doc.mdx'),
+      `---
+title: Linked Doc
+sdk: react, nextjs, astro
+---
+
+# Linked Doc`,
+      'utf-8',
+    )
+
+    invalidate(pathJoin('./docs/linked-doc.mdx'))
+
+    // Second build with same store (should detect changes)
+    await build(config, store)
+
+    // Check updated content
+    expect(await readFile(pathJoin('./dist/cached-doc.mdx'))).toContain(
+      'Link to Linked Doc',
+    )
+  })
+
+  test('should invalidate linked pages when the typedoc changes', async () => {
+    const { tempDir, pathJoin } = await createTempFiles([
+      {
+        path: './docs/manifest.json',
+        content: JSON.stringify({
+          navigation: [
+            [
+              { title: 'Cached Doc', href: '/docs/cached-doc' },
+              { title: 'Linked Doc', href: '/docs/linked-doc' },
+            ],
+          ],
+        }),
+      },
+      {
+        path: './typedoc/component.mdx',
+        content: `[Link to Linked Doc](/docs/linked-doc)`,
+      },
+      {
+        path: './docs/cached-doc.mdx',
+        content: `---
+title: Original Title
+---
+
+<Typedoc src="component" />`,
+      },
+      {
+        path: './docs/linked-doc.mdx',
+        content: `---
+title: Linked Doc
+sdk: react, nextjs
+---
+
+# Linked Doc`,
+      },
+    ])
+
+    // Create store to maintain cache across builds
+    const store = createBlankStore()
+    const config = await createConfig({
+      ...baseConfig,
+      basePath: tempDir,
+      validSdks: ['react', 'nextjs', 'astro'],
+    })
+    const invalidate = invalidateFile(store, config)
+
+    // First build
+    await build(config, store)
+
+    expect(await readFile(pathJoin('./dist/cached-doc.mdx'))).toContain(
+      'Link to Linked Doc',
+    )
+
+    // Update file content
+    await fs.writeFile(
+      pathJoin('./docs/linked-doc.mdx'),
+      `---
+title: Linked Doc
+sdk: react, nextjs, astro
+---
+
+# Linked Doc`,
+      'utf-8',
+    )
+
+    invalidate(pathJoin('./docs/linked-doc.mdx'))
+
+    // Second build with same store (should detect changes)
+    await build(config, store)
+
+    // Check updated content
+    expect(await readFile(pathJoin('./dist/cached-doc.mdx'))).toContain(
+      'Link to Linked Doc',
+    )
+  })
+
   test('should update doc content when the partial changes in a sdk scoped doc', async () => {
     const { tempDir, pathJoin } = await createTempFiles([
       {
@@ -4309,7 +4529,6 @@ describe('API Errors Generation', () => {
       flags: {
         skipApiErrors: false,
         skipGit: true,
-        clean: true,
       },
     }),
   )
diff --git a/scripts/build-docs.ts b/scripts/build-docs.ts
index 40ed64cd6d..9505d26938 100644
--- a/scripts/build-docs.ts
+++ b/scripts/build-docs.ts
@@ -38,6 +38,7 @@
 import fs from 'node:fs/promises'
 import path from 'node:path'
+import readdirp from 'readdirp'
 import { remark } from 'remark'
 import remarkFrontmatter from 'remark-frontmatter'
 import remarkMdx from 'remark-mdx'
@@ -45,25 +46,23 @@
 import { Node } from 'unist'
 import { filter as mdastFilter } from 'unist-util-filter'
 import { visit as mdastVisit } from 'unist-util-visit'
 import reporter from 'vfile-reporter'
-import readdirp from 'readdirp'

 import { generateApiErrorDocs } from './lib/api-errors'
 import { createConfig, type BuildConfig } from './lib/config'
 import { watchAndRebuild } from './lib/dev'
 import { errorMessages, shouldIgnoreWarning } from './lib/error-messages'
+import { getLastCommitDate } from './lib/getLastCommitDate'
 import { ensureDirectory, readDocsFolder, writeDistFile, writeSDKFile } from './lib/io'
 import { flattenTree, ManifestGroup, readManifest, traverseTree, traverseTreeItemsFirst } from './lib/manifest'
 import { parseInMarkdownFile } from './lib/markdown'
 import { readPartialsFolder, readPartialsMarkdown } from './lib/partials'
 import { isValidSdk, VALID_SDKS, type SDK } from './lib/schemas'
-import { createBlankStore, DocsMap, getMarkdownCache, Store } from './lib/store'
+import { createBlankStore, DocsMap, getCoreDocCache, getMarkdownCache, markDocumentDirty, Store } from './lib/store'
 import { readTypedocsFolder, readTypedocsMarkdown, typedocTableSpecialCharacters } from './lib/typedoc'
-import { getLastCommitDate } from './lib/getLastCommitDate'
 import { documentHasIfComponents } from './lib/utils/documentHasIfComponents'
 import { extractComponentPropValueFromNode } from './lib/utils/extractComponentPropValueFromNode'
 import { extractSDKsFromIfProp } from './lib/utils/extractSDKsFromIfProp'
-import { removeMdxSuffix } from './lib/utils/removeMdxSuffix'
 import { scopeHrefToSDK } from './lib/utils/scopeHrefToSDK'

 import { checkPartials } from './lib/plugins/checkPartials'
@@ -76,9 +75,9 @@
 import { validateUniqueHeadings } from './lib/plugins/validateUniqueHeadings'
 import {
   analyzeAndFixRedirects as optimizeRedirects,
   readRedirects,
-  type Redirect,
   transformRedirectsToObject,
   writeRedirects,
+  type Redirect,
 } from './lib/redirects'

 // Only invokes the main function if we run the script directly eg npm run build, bun run ./scripts/build-docs.ts
@@ -149,7 +148,6 @@ async function main() {
       watch: args.includes('--watch'),
       controlled: args.includes('--controlled'),
       skipApiErrors: args.includes('--skip-api-errors'),
-      clean: args.includes('--clean'),
       skipGit: args.includes('--skip-git'),
     },
   })
@@ -169,7 +167,7 @@ async function main() {
   if (config.flags.watch) {
     console.info(`Watching for changes...`)
-    watchAndRebuild(store, { ...config, flags: { ...config.flags, clean: true } }, build)
+    watchAndRebuild(store, config, build)
   } else if (output !== '') {
     process.exit(1)
   }
@@ -188,7 +186,9 @@ export async function build(config: BuildConfig, store: Store = createBlankStore
   const writeFile = writeDistFile(config)
   const writeSdkFile = writeSDKFile(config)
   const markdownCache = getMarkdownCache(store)
+  const coreDocCache = getCoreDocCache(store)
   const getCommitDate = getLastCommitDate(config)
+  const markDirty = markDocumentDirty(store)

   await ensureDir(config.distFinalPath)
@@ -247,14 +247,13 @@ export async function build(config: BuildConfig, store: Store = createBlankStore
   // Read in all the docs
   const docsArray = await Promise.all(
     docsFiles.map(async (file) => {
-      const href = removeMdxSuffix(`${config.baseDocsLink}${file.path}`)
-      const inManifest = docsInManifest.has(href)
+      const inManifest = docsInManifest.has(file.href)

-      const markdownFile = await markdownCache(href, () =>
-        parseMarkdownFile(href, partials, typedocs, inManifest, 'docs'),
+      const markdownFile = await markdownCache(file.filePath, () =>
+        parseMarkdownFile(file, partials, typedocs, inManifest, 'docs'),
       )

-      docsMap.set(href, markdownFile)
+      docsMap.set(file.href, markdownFile)

       return markdownFile
     }),
   )
@@ -413,11 +412,16 @@ export async function build(config: BuildConfig, store: Store = createBlankStore
     try {
       let node: Node | null = null
+      const links: Set<string> = new Set()

       const vfile = await remark()
         .use(remarkFrontmatter)
         .use(remarkMdx)
-        .use(validateAndEmbedLinks(config, docsMap, partialPath, 'partials'))
+        .use(
+          validateAndEmbedLinks(config, docsMap, partialPath, 'partials', (linkInPartial) => {
+            links.add(linkInPartial)
+          }),
+        )
         .use(() => (tree, vfile) => {
           node = tree
         })
@@ -431,6 +435,7 @@ export async function build(config: BuildConfig, store: Store = createBlankStore
         ...partial,
         node: node as Node,
         vfile,
+        links,
       }
     } catch (error) {
       console.error(`✗ Error validating partial: ${partial.path}`)
@@ -446,10 +451,15 @@ export async function build(config: BuildConfig, store: Store = createBlankStore
     try {
       let node: Node | null = null
+      const links: Set<string> = new Set()

       const vfile = await remark()
         .use(remarkMdx)
-        .use(validateAndEmbedLinks(config, docsMap, filePath, 'typedoc'))
+        .use(
+          validateAndEmbedLinks(config, docsMap, filePath, 'typedoc', (linkInTypedoc) => {
+            links.add(linkInTypedoc)
+          }),
+        )
         .use(() => (tree, vfile) => {
           node = tree
         })
@@ -463,13 +473,19 @@ export async function build(config: BuildConfig, store: Store = createBlankStore
         ...typedoc,
         vfile,
         node: node as Node,
+        links,
       }
     } catch (error) {
       try {
         let node: Node | null = null
+        const links: Set<string> = new Set()

         const vfile = await remark()
-          .use(validateAndEmbedLinks(config, docsMap, filePath, 'typedoc'))
+          .use(
+            validateAndEmbedLinks(config, docsMap, filePath, 'typedoc', (linkInTypedoc) => {
+              links.add(linkInTypedoc)
+            }),
+          )
           .use(() => (tree, vfile) => {
             node = tree
           })
@@ -483,6 +499,7 @@ export async function build(config: BuildConfig, store: Store = createBlankStore
         ...typedoc,
         vfile,
         node: node as Node,
+        links,
       }
     } catch (error) {
       console.error(error)
@@ -522,29 +539,78 @@ export async function build(config: BuildConfig, store: Store = createBlankStore
     }),
   )

-  const coreVFiles = await Promise.all(
+  const cachedCoreDocsSize = store.coreDocs.size
+  const coreDocs = await Promise.all(
     docsArray.map(async (doc) => {
-      const filePath = `${doc.href}.mdx`
+      const foundLinks: Set<string> = new Set()
+      const foundPartials: Set<string> = new Set()
+      const foundTypedocs: Set<string> = new Set()

-      const vfile = await remark()
-        .use(remarkFrontmatter)
-        .use(remarkMdx)
-        .use(validateAndEmbedLinks(config, docsMap, filePath, 'docs', doc))
-        .use(validateIfComponents(config, filePath, doc, flatSDKScopedManifest))
-        .use(checkPartials(config, validatedPartials, filePath, { reportWarnings: false, embed: true }))
-        .use(checkTypedoc(config, validatedTypedocs, filePath, { reportWarnings: false, embed: true }))
-        .use(
-          insertFrontmatter({
-            lastUpdated: (await getCommitDate(path.join(config.docsPath, '..', filePath)))?.toISOString() ?? undefined,
-          }),
-        )
-        .process(doc.vfile)
+      const vfile = await coreDocCache(doc.file.filePath, async () =>
+        remark()
+          .use(remarkFrontmatter)
+          .use(remarkMdx)
+          .use(
+            validateAndEmbedLinks(
+              config,
+              docsMap,
+              doc.file.filePath,
+              'docs',
+              (link) => {
+                foundLinks.add(link)
+              },
+              doc.file.href,
+            ),
+          )
+          .use(
+            checkPartials(config, validatedPartials, doc.file, { reportWarnings: false, embed: true }, (partial) => {
+              foundPartials.add(partial)
+            }),
+          )
+          .use(
+            checkTypedoc(
+              config,
+              validatedTypedocs,
+              doc.file.filePath,
+              { reportWarnings: false, embed: true },
+              (typedoc) => {
+                foundTypedocs.add(typedoc)
+              },
+            ),
+          )
+          .use(validateIfComponents(config, doc.file.filePath, doc, flatSDKScopedManifest))
+          .use(
+            insertFrontmatter({
+              lastUpdated: (await getCommitDate(doc.file.fullFilePath))?.toISOString() ?? undefined,
+            }),
+          )
+          .process(doc.vfile),
+      )

+      const partialsLinks = validatedPartials
+        .filter((partial) => foundPartials.has(`_partials/${partial.path}`))
+        .reduce((acc, { links }) => new Set([...acc, ...links]), foundPartials)

+      const typedocsLinks = validatedTypedocs
+        .filter((typedoc) => foundTypedocs.has(typedoc.path))
+        .reduce((acc, { links }) => new Set([...acc, ...links]), foundTypedocs)

+      const allLinks = new Set([...foundLinks, ...partialsLinks, ...typedocsLinks])

-      const distFilePath = `${doc.href.replace(config.baseDocsLink, '')}.mdx`
+      allLinks.forEach((link) => {
+        markDirty(doc.file.filePath, link)
+      })

-      if (isValidSdk(config)(distFilePath.split('/')[0])) {
-        if (!shouldIgnoreWarning(config, filePath, 'docs', 'sdk-path-conflict')) {
-          throw new Error(errorMessages['sdk-path-conflict'](doc.href, distFilePath))
+      return { ...doc, vfile }
+    }),
+  )
+  console.info(`✓ Validated all core docs (${cachedCoreDocsSize} cached)`)
+
+  await Promise.all(
+    coreDocs.map(async (doc) => {
+      if (isValidSdk(config)(doc.file.filePathInDocsFolder.split('/')[0])) {
+        if (!shouldIgnoreWarning(config, doc.file.filePath, 'docs', 'sdk-path-conflict')) {
+          throw new Error(errorMessages['sdk-path-conflict'](doc.file.href, doc.file.filePathInDocsFolder))
         }
       }
@@ -552,23 +618,19 @@

       // This is a sdk specific doc, so we want to put a landing page here to redirect the user to a doc customized to their sdk.

       await writeFile(
-        distFilePath,
+        doc.file.filePathInDocsFolder,
         `---
 template: wide
 ---
-`,
+`,
       )
-
-      return vfile
+      } else {
+        await writeFile(doc.file.filePathInDocsFolder, typedocTableSpecialCharacters.decode(doc.vfile.value as string))
       }
-
-      await writeFile(distFilePath, typedocTableSpecialCharacters.decode(String(vfile)))
-
-      return vfile
     }),
   )

-  console.info(`✓ Validated and wrote out all core docs`)
+  console.info(`✓ Wrote out all core docs (${coreDocs.length} total)`)

   const sdkSpecificVFiles = await Promise.all(
     config.validSdks.map(async (targetSdk) => {
@@ -577,30 +639,28 @@ template: wide
       if (doc.sdk === undefined) return null // skip core docs
       if (doc.sdk.includes(targetSdk) === false) return null // skip docs that are not for the target sdk

-      const filePath = `${doc.href}.mdx`
       const vfile = await remark()
         .use(remarkFrontmatter)
         .use(remarkMdx)
-        .use(validateAndEmbedLinks(config, docsMap, filePath, 'docs', doc))
-        .use(checkPartials(config, partials, filePath, { reportWarnings: true, embed: true }))
-        .use(checkTypedoc(config, typedocs, filePath, { reportWarnings: true, embed: true }))
-        .use(filterOtherSDKsContentOut(config, filePath, targetSdk))
-        .use(validateUniqueHeadings(config, filePath, 'docs'))
+        .use(validateAndEmbedLinks(config, docsMap, doc.file.filePath, 'docs', undefined, doc.file.href))
+        .use(checkPartials(config, partials, doc.file, { reportWarnings: true, embed: true }))
+        .use(checkTypedoc(config, typedocs, doc.file.filePath, { reportWarnings: true, embed: true }))
+        .use(filterOtherSDKsContentOut(config, doc.file.filePath, targetSdk))
+        .use(validateUniqueHeadings(config, doc.file.filePath, 'docs'))
         .use(
           insertFrontmatter({
-            canonical: doc.sdk ? scopeHrefToSDK(config)(doc.href, ':sdk:') : doc.href,
-            lastUpdated:
-              (await getCommitDate(path.join(config.docsPath, '..', filePath)))?.toISOString() ?? undefined,
+            canonical: doc.sdk ? scopeHrefToSDK(config)(doc.file.href, ':sdk:') : doc.file.href,
+            lastUpdated: (await getCommitDate(doc.file.fullFilePath))?.toISOString() ?? undefined,
           }),
         )
         .process({
-          path: filePath,
+          path: doc.file.filePath,
           value: doc.fileContent,
         })

       await writeSdkFile(
         targetSdk,
-        `${doc.href.replace(config.baseDocsLink, '')}.mdx`,
+        doc.file.filePathInDocsFolder,
         typedocTableSpecialCharacters.decode(String(vfile)),
       )

@@ -622,17 +682,24 @@ template: wide
   const extractSDKsFromIfComponent = extractSDKsFromIfProp(config)

   for (const doc of docsWithOnlyIfComponents) {
-    const filePath = `${doc.href}.mdx`
-
     // Extract all SDK values from all components
     const availableSDKs = new Set<SDK>()

     mdastVisit(doc.node, (node) => {
-      const sdkProp = extractComponentPropValueFromNode(config, node, undefined, 'If', 'sdk', true, 'docs', filePath)
+      const sdkProp = extractComponentPropValueFromNode(
+        config,
+        node,
+        undefined,
+        'If',
+        'sdk',
+        true,
+        'docs',
+        doc.file.filePath,
+      )

       if (sdkProp === undefined) return

-      const sdks = extractSDKsFromIfComponent(node, undefined, sdkProp, 'docs', filePath)
+      const sdks = extractSDKsFromIfComponent(node, undefined, sdkProp, 'docs', doc.file.filePath)

       if (sdks === undefined) return

@@ -653,20 +720,20 @@ template: wide
             'sdk',
             false,
             'docs',
-            filePath,
+            doc.file.filePath,
           )

           if (!sdkProp) return true

-          const ifSdks = extractSDKsFromIfComponent(node, undefined, sdkProp, 'docs', filePath)
+          const ifSdks = extractSDKsFromIfComponent(node, undefined, sdkProp, 'docs', doc.file.filePath)

           if (!ifSdks) return true

           return ifSdks.includes(sdk)
         })
       })
-      .use(validateUniqueHeadings(config, filePath, 'docs'))
+      .use(validateUniqueHeadings(config, doc.file.filePath, 'docs'))
       .process({
-        path: filePath,
+        path: doc.file.filePath,
         value: String(doc.vfile),
       })
   }
@@ -701,6 +768,7 @@ template: wide
     .flatMap(({ vFiles }) => vFiles)
     .filter((item): item is NonNullable<typeof item> => item !== null)

+  const coreVFiles = coreDocs.map((doc) => doc.vfile)
   const partialsVFiles = validatedPartials.map((partial) => partial.vfile)
   const typedocVFiles = validatedTypedocs.map((typedoc) => typedoc.vfile)

diff --git a/scripts/lib/config.ts b/scripts/lib/config.ts
index 82d458929e..e6d9bec546 100644
--- a/scripts/lib/config.ts
+++ b/scripts/lib/config.ts
@@ -42,7 +42,6 @@ type BuildConfigOptions = {
     watch?: boolean
     controlled?: boolean
     skipGit?: boolean
-    clean?: boolean
    skipApiErrors?: boolean
  }
 }
@@ -116,7 +115,6 @@ export async function createConfig(config: BuildConfigOptions) {
      watch: config.flags?.watch ?? false,
      controlled: config.flags?.controlled ?? false,
      skipGit: config.flags?.skipGit ?? false,
-      clean: config.flags?.clean ?? false,
      skipApiErrors: config.flags?.skipApiErrors ?? false,
    },
  }
 }
diff --git a/scripts/lib/dev.ts b/scripts/lib/dev.ts
index 485a7f8534..7d83bbb62f 100644
--- a/scripts/lib/dev.ts
+++ b/scripts/lib/dev.ts
@@ -2,9 +2,10 @@
 // invalidates the cache and kicks off a rebuild of the docs

 import watcher from '@parcel/watcher'
+import path from 'path'
+import type { build } from '../build-docs'
 import type { BuildConfig } from './config'
 import { invalidateFile, type Store } from './store'
-import type { build } from '../build-docs'

 export const watchAndRebuild = (store: Store, config: BuildConfig, buildFunc: typeof build) => {
   const invalidate = invalidateFile(store, config)
@@ -43,9 +44,27 @@ export const watchAndRebuild = (store: Store, config: BuildConfig, buildFunc: ty
   }

   watcher.subscribe(config.dataPath, handleFileChange)
+
+  if (config.redirects) {
+    const staticDir = path.dirname(config.redirects.static.inputPath)
+    const dynamicDir = path.dirname(config.redirects.dynamic.inputPath)
+
+    if (staticDir === dynamicDir) {
+      watcher.subscribe(staticDir, handleFileChange)
+    } else {
+      watcher.subscribe(staticDir, handleFileChange)
+      watcher.subscribe(dynamicDir, handleFileChange)
+    }
+  }
+
   watcher.subscribe(config.docsPath, handleFileChange, {
     // Ignore generated files
     ignore: [`${config.docsPath}/errors/backend-api.mdx`, `${config.docsPath}/errors/frontend-api.mdx`],
   })
+
+  watcher.subscribe(config.typedocPath, handleFileChange)
+
+  if (config.publicPath) {
+    watcher.subscribe(config.publicPath, handleFileChange)
+  }
 }
diff --git a/scripts/lib/error-messages.ts b/scripts/lib/error-messages.ts
index 46637450af..ec8975a510 100644
--- a/scripts/lib/error-messages.ts
+++ b/scripts/lib/error-messages.ts
@@ -64,7 +64,7 @@ export const errorMessages = {
   'link-hash-not-found': (hash: string, url: string): string => `Hash "${hash}" not found in ${url}`,

   // File reading errors
-  'file-read-error': (filePath: string): string => `file ${filePath} doesn't exist`,
+  'file-read-error': (filePath: string): string => `Failed to read in ${filePath}`,
   'partial-read-error': (path: string): string => `Failed to read in ${path} from partials file`,
   'markdown-read-error': (href: string): string => `Attempting to read in ${href}.mdx failed`,
   'partial-parse-error': (path: string): string => `Failed to parse the content of ${path}`,
diff --git a/scripts/lib/io.ts b/scripts/lib/io.ts
index b5bf65032a..61dbb88da3 100644
--- a/scripts/lib/io.ts
+++ b/scripts/lib/io.ts
@@ -1,3 +1,4 @@
+import { removeMdxSuffix } from './utils/removeMdxSuffix'
 import { errorMessages } from './error-messages'
 import fs from 'node:fs/promises'
 import path from 'node:path'
@@ -6,9 +7,7 @@
 import readdirp from 'readdirp'
 import type { SDK } from './schemas'

 // Read in a markdown file from the docs folder
-export const readMarkdownFile = (config: BuildConfig) => async (docPath: string) => {
-  const filePath = path.join(config.docsPath, docPath)
-
+export const readMarkdownFile = async (filePath: string) => {
   try {
     const fileContent = await fs.readFile(filePath, { encoding: 'utf-8' })
     return [null, fileContent] as const
@@ -19,14 +18,30 @@
 // list all the docs in the docs folder
 export const readDocsFolder = (config: BuildConfig) => async () => {
-  return readdirp.promise(config.docsPath, {
+  const files = await readdirp.promise(config.docsPath, {
     type: 'files',
     fileFilter: (entry) =>
       // Partials are inside the docs folder, so we need to exclude them
      `${config.docsRelativePath}/${entry.path}`.startsWith(config.partialsRelativePath) === false &&
      entry.path.endsWith('.mdx'),
  })
+
+  return files.map((file) => {
+    const filePath = path.join(config.baseDocsLink, file.path)
+    const href = removeMdxSuffix(filePath)
+
+    return {
+      filePath: filePath as `/docs/${string}.mdx`,
+      relativeFilePath: filePath.substring(1) as `docs/${string}.mdx`,
+      fullFilePath: path.join(config.basePath, '..', filePath) as `${string}.mdx`,
+      filePathInDocsFolder: file.path as `${string}.mdx`,
+
+      href: href as `/docs/${string}`,
+      relativeHref: href.substring(1) as `docs/${string}`,
+    }
+  })
 }

+export type DocsFile = Awaited<ReturnType<ReturnType<typeof readDocsFolder>>>[number]
+
 // checks if a folder exists, if not it will be created
 export const ensureDirectory =
diff --git a/scripts/lib/markdown.ts b/scripts/lib/markdown.ts
index ea0bb5802f..8d83d99b6d 100644
--- a/scripts/lib/markdown.ts
+++ b/scripts/lib/markdown.ts
@@ -18,7 +18,7 @@
 import { Node } from 'unist'
 import { visit as mdastVisit } from 'unist-util-visit'
 import { type BuildConfig } from './config'
 import { errorMessages, safeFail, safeMessage, type WarningsSection } from './error-messages'
-import { readMarkdownFile } from './io'
+import { type DocsFile, readMarkdownFile } from './io'
 import { checkPartials } from './plugins/checkPartials'
 import { checkTypedoc } from './plugins/checkTypedoc'
 import { extractFrontmatter, type Frontmatter } from './plugins/extractFrontmatter'
@@ -28,17 +28,16 @@
 import { extractHeadingFromHeadingNode } from './utils/extractHeadingFromHeadingNode'

 export const parseInMarkdownFile =
   (config: BuildConfig) =>
   async (
-    href: string,
+    file: DocsFile,
     partials: { path: string; content: string; node: Node }[],
     typedocs: { path: string; content: string; node: Node }[],
     inManifest: boolean,
     section: WarningsSection,
   ) => {
-    const readFile = readMarkdownFile(config)
-    const [error, fileContent] = await readFile(`${href}.mdx`.replace(config.baseDocsLink, ''))
+    const [error, fileContent] = await readMarkdownFile(file.fullFilePath)

     if (error !== null) {
-      throw new Error(errorMessages['markdown-read-error'](href), {
+      throw new Error(errorMessages['markdown-read-error'](file.href), {
         cause: error,
       })
     }
@@ -47,7 +46,6 @@ export const parseInMarkdownFile =
     const slugify = slugifyWithCounter()
     const headingsHashes = new Set<string>()
-    const filePath = `${href}.mdx`
     let node: Node | undefined = undefined

     const vfile = await remark()
@@ -57,22 +55,22 @@
         node = tree

         if (inManifest === false) {
-          safeMessage(config, vfile, filePath, section, 'doc-not-in-manifest', [])
+          safeMessage(config, vfile, file.filePath, section, 'doc-not-in-manifest', [])
         }

-        if (href !== encodeURI(href)) {
-          safeFail(config, vfile, filePath, section, 'invalid-href-encoding', [href])
+        if (file.href !== encodeURI(file.href)) {
+          safeFail(config, vfile, file.filePath, section, 'invalid-href-encoding', [file.href])
         }
       })
       .use(
-        extractFrontmatter(config, href, filePath, section, (fm) => {
+        extractFrontmatter(config, file.href, file.filePath, section, (fm) => {
           frontmatter = fm
         }),
       )
-      .use(checkPartials(config, partials, filePath, { reportWarnings: true, embed: false }))
-      .use(checkTypedoc(config, typedocs, filePath, { reportWarnings: true, embed: false }))
+      .use(checkPartials(config, partials, file, { reportWarnings: true, embed: false }))
+      .use(checkTypedoc(config, typedocs, file.filePath, { reportWarnings: true, embed: false }))
      .process({
-        path: `${href.substring(1)}.mdx`,
+        path: file.relativeFilePath,
        value: fileContent,
      })

@@ -81,8 +79,8 @@ export const parseInMarkdownFile =
     await remark()
       .use(remarkFrontmatter)
       .use(remarkMdx)
-      .use(checkPartials(config, partials, filePath, { reportWarnings: false, embed: true }))
-      .use(checkTypedoc(config, typedocs, filePath, { reportWarnings: false, embed: true }))
+      .use(checkPartials(config, partials, file, { reportWarnings: false, embed: true }))
+      .use(checkTypedoc(config, typedocs, file.filePath, { reportWarnings: false, embed: true }))
       // extract out the headings to check hashes in links
       .use(() => (tree, vfile) => {
         const documentContainsIfComponent = documentHasIfComponents(tree)
@@ -95,7 +93,7 @@ export const parseInMarkdownFile =
             if (id !== undefined) {
               if (documentContainsIfComponent === false && headingsHashes.has(id)) {
-                safeFail(config, vfile, filePath, section, 'duplicate-heading-id', [href, id])
+                safeFail(config, vfile, file.filePath, section, 'duplicate-heading-id', [file.href, id])
               }

               headingsHashes.add(id)
             } else {
               const slug = slugify(toString(node).trim())

               if (documentContainsIfComponent === false && headingsHashes.has(slug)) {
-                safeFail(config, vfile, filePath, section, 'duplicate-heading-id', [href, slug])
+                safeFail(config, vfile, file.filePath, section, 'duplicate-heading-id', [file.href, slug])
               }

               headingsHashes.add(slug)
@@ -112,20 +110,20 @@
         )
       })
       .process({
-        path: `${href.substring(1)}.mdx`,
+        path: file.relativeFilePath,
         value: fileContent,
       })

     if (node === undefined) {
-      throw new Error(errorMessages['doc-parse-failed'](href))
+      throw new Error(errorMessages['doc-parse-failed'](file.href))
     }

     if (frontmatter === undefined) {
-      throw new Error(errorMessages['frontmatter-parse-failed'](href))
+      throw new Error(errorMessages['frontmatter-parse-failed'](file.href))
     }

     return {
-      href,
+      file,
       sdk: (frontmatter as Frontmatter).sdk,
       vfile,
       headingsHashes,
diff --git a/scripts/lib/partials.ts b/scripts/lib/partials.ts
index 9ab9ffe158..759dfd8f7d 100644
--- a/scripts/lib/partials.ts
+++ b/scripts/lib/partials.ts
@@ -25,11 +25,9 @@ export const readPartialsFolder = (config: BuildConfig) => async () => {
 }

 export const readPartial = (config: BuildConfig) => async (filePath: string) => {
-  const readFile = readMarkdownFile(config)
+  const fullPath = path.join(config.partialsPath, filePath)

-  const fullPath = path.join(config.docsRelativePath, config.partialsRelativePath, filePath)
-
-  const [error, content] = await readFile(fullPath)
+  const [error, content] = await readMarkdownFile(fullPath)

   if (error) {
     throw new Error(errorMessages['partial-read-error'](fullPath), { cause: error })
diff --git a/scripts/lib/plugins/checkPartials.ts b/scripts/lib/plugins/checkPartials.ts
index b14a198c72..30d54d9524 100644
--- a/scripts/lib/plugins/checkPartials.ts
+++ b/scripts/lib/plugins/checkPartials.ts
@@ -4,12 +4,13 @@
 // - only embed the partials contents in to the markdown
 // - both report warnings and embed the partials contents

-import type { BuildConfig } from '../config'
 import type { Node } from 'unist'
-import type { VFile } from 'vfile'
 import { map as mdastMap } from 'unist-util-map'
-import { extractComponentPropValueFromNode } from '../utils/extractComponentPropValueFromNode'
+import type { VFile } from 'vfile'
+import type { BuildConfig } from '../config'
 import { safeMessage } from '../error-messages'
+import type { DocsFile } from '../io'
+import { extractComponentPropValueFromNode } from '../utils/extractComponentPropValueFromNode'
 import { removeMdxSuffix } from '../utils/removeMdxSuffix'

 export const checkPartials =
   (
     config: BuildConfig,
     partials: {
       node: Node
       path: string
     }[],
-    filePath: string,
+    file: DocsFile,
     options: {
       reportWarnings: boolean
       embed: boolean
     },
+    foundPartial?: (partial: string) => void,
   ) =>
   () =>
   (tree: Node, vfile: VFile) => {
@@ -36,14 +38,14 @@ export const checkPartials =
         'src',
         true,
         'docs',
-        filePath,
+        file.filePath,
       )

       if (partialSrc === undefined) return node

       if (partialSrc.startsWith('_partials/') === false) {
         if (options.reportWarnings === true) {
-          safeMessage(config, vfile, filePath, 'docs', 'include-src-not-partials', [], node.position)
+          safeMessage(config, vfile, file.filePath, 'docs', 'include-src-not-partials', [], node.position)
         }
         return node
       }
@@ -55,7 +57,7 @@ export const checkPartials =
           safeMessage(
             config,
             vfile,
-            filePath,
+            file.filePath,
             'docs',
             'partial-not-found',
             [removeMdxSuffix(partialSrc)],
@@ -65,6 +67,8 @@ export const checkPartials =
         return node
       }

+      foundPartial?.(`${removeMdxSuffix(partialSrc)}.mdx`)
+
       if (options.embed === true) {
         return Object.assign(node, partial.node)
       }
diff --git a/scripts/lib/plugins/checkTypedoc.ts b/scripts/lib/plugins/checkTypedoc.ts
index d1eb874c03..14a46b7895 100644
--- a/scripts/lib/plugins/checkTypedoc.ts
+++ b/scripts/lib/plugins/checkTypedoc.ts
@@ -20,6 +20,7 @@ export const checkTypedoc =
     typedocs: { path: string; node: Node }[],
     filePath: string,
     options: { reportWarnings: boolean; embed: boolean },
+    foundTypedoc?: (typedoc: string) => void,
   ) =>
   () =>
   (tree: Node, vfile: VFile) => {
@@ -60,6 +61,8 @@ export const checkTypedoc =
         return node
       }

+      foundTypedoc?.(`${removeMdxSuffix(typedocSrc)}.mdx`)
+
       if (options.embed === true) {
         return Object.assign(node, typedoc.node)
       }
diff --git a/scripts/lib/plugins/validateAndEmbedLinks.ts b/scripts/lib/plugins/validateAndEmbedLinks.ts
index 98dd625ac4..7c14b32281 100644
--- a/scripts/lib/plugins/validateAndEmbedLinks.ts
+++ b/scripts/lib/plugins/validateAndEmbedLinks.ts
@@ -10,13 +10,20 @@
 import type { VFile } from 'vfile'
 import { SDKLink } from '../components/SDKLink'
 import { type BuildConfig } from '../config'
 import { safeMessage, type WarningsSection } from '../error-messages'
-import { DocsMap } from '../store'
+import { type DocsMap } from '../store'
+import { findComponent } from '../utils/findComponent'
 import { removeMdxSuffix } from '../utils/removeMdxSuffix'
 import { scopeHrefToSDK } from '../utils/scopeHrefToSDK'
-import { findComponent } from '../utils/findComponent'

 export const validateAndEmbedLinks =
-  (config: BuildConfig, docsMap: DocsMap, filePath: string, section: WarningsSection, doc?: { href: string }) =>
+  (
+    config: BuildConfig,
+    docsMap: DocsMap,
+    filePath: string,
+    section: WarningsSection,
+    foundLink?: (link: string) => void,
+    href?: string,
+  ) =>
   () =>
   (tree: Node, vfile: VFile) => {
     const checkCardsComponentScope = watchComponentScope('Cards')
@@ -27,7 +34,7 @@ export const validateAndEmbedLinks =
       if (node.type !== 'link') return node
       if (!('url' in node)) return node
       if (typeof node.url !== 'string') return node
-      if (!node.url.startsWith(config.baseDocsLink) && (!node.url.startsWith('#') || doc === undefined)) return node
+      if (!node.url.startsWith(config.baseDocsLink) && (!node.url.startsWith('#') || href === undefined)) return node
       if (!('children' in node)) return node

       // we are overwriting the url with the mdx suffix removed

       let [url, hash] = (node.url as string).split('#')

-      if (url === '' && doc !== undefined) {
+      if (url === '' && href !== undefined) {
         // If the link is just a hash, then we need to link to the same doc
-        url = doc.href
+        url = href
       }

       const ignore = config.ignoredLink(url)
@@ -50,6 +57,8 @@ export const validateAndEmbedLinks =
         return node
       }

+      foundLink?.(linkedDoc.file.filePath)
+
       if (hash !== undefined) {
         const hasHash = linkedDoc.headingsHashes.has(hash)

diff --git a/scripts/lib/plugins/validateIfComponents.ts b/scripts/lib/plugins/validateIfComponents.ts
index 511f575462..461136f8cf 100644
--- a/scripts/lib/plugins/validateIfComponents.ts
+++ b/scripts/lib/plugins/validateIfComponents.ts
@@ -9,7 +9,12 @@
 import { extractComponentPropValueFromNode } from '../utils/extractComponentPropValueFromNode'
 import { extractSDKsFromIfProp } from '../utils/extractSDKsFromIfProp'

 export const validateIfComponents =
-  (config: BuildConfig, filePath: string, doc: { href: string; sdk?: SDK[] }, flatSDKScopedManifest: ManifestItem[]) =>
+  (
+    config: BuildConfig,
+    filePath: string,
+    doc: { file: { href: string }; sdk?: SDK[] },
+    flatSDKScopedManifest: ManifestItem[],
+  ) =>
   () =>
   (tree: Node, vfile: VFile) => {
     mdastVisit(tree, (node) => {
@@ -21,7 +26,7 @@ export const validateIfComponents =

       if (sdksFilter === undefined) return

-      const manifestItems = flatSDKScopedManifest.filter((item) => item.href === doc.href)
+      const manifestItems = flatSDKScopedManifest.filter((item) => item.href === doc.file.href)

       const availableSDKs = manifestItems.flatMap((item) => item.sdk).filter(Boolean)

@@ -59,7 +64,7 @@ export const validateIfComponents =
             filePath,
             'docs',
             'if-component-sdk-not-in-manifest',
-            [sdk, doc.href],
+            [sdk, doc.file.href],
             node.position,
           )
         }
diff --git a/scripts/lib/store.ts b/scripts/lib/store.ts
index ca44f23cd2..f5003703bb 100644
--- a/scripts/lib/store.ts
+++ b/scripts/lib/store.ts
@@ -4,35 +4,63 @@
 // use the `invalidateFile()` function to remove a file from the store

 import path from 'node:path'
+import type { VFile } from 'vfile'
 import type { BuildConfig } from './config'
-import { removeMdxSuffix } from './utils/removeMdxSuffix'
+import type { parseInMarkdownFile } from './markdown'
 import type { readPartial } from './partials'
 import type { readTypedoc } from './typedoc'
-import type { parseInMarkdownFile } from './markdown'

 type MarkdownFile = Awaited<ReturnType<ReturnType<typeof parseInMarkdownFile>>>
+type CoreDocsFile = VFile
 type PartialsFile = Awaited<ReturnType<ReturnType<typeof readPartial>>>
 type TypedocsFile = Awaited<ReturnType<ReturnType<typeof readTypedoc>>>

 export type DocsMap = Map<string, MarkdownFile>
+export type CoreDocsMap = Map<string, CoreDocsFile>
 export type PartialsMap = Map<string, PartialsFile>
 export type TypedocsMap = Map<string, TypedocsFile>

 export const createBlankStore = () => ({
   markdown: new Map() as DocsMap,
+  coreDocs: new Map() as CoreDocsMap,
   partials: new Map() as PartialsMap,
   typedocs: new Map() as TypedocsMap,
+  dirtyDocMap: new Map() as Map<string, Set<string>>,
 })

 export type Store = ReturnType<typeof createBlankStore>

 export const invalidateFile =
-  (store: ReturnType<typeof createBlankStore>, config: BuildConfig) => (filePath: string) => {
-    store.markdown.delete(removeMdxSuffix(`${config.baseDocsLink}${path.relative(config.docsPath, filePath)}`))
+  (store: ReturnType<typeof createBlankStore>, config: BuildConfig) =>
+  (filePath: string, invalidateAdjacentDocs: boolean = true) => {
+    console.log(`invalidating ${filePath}`)
+
+    const docsPath = path.join(config.baseDocsLink, path.relative(config.docsPath, filePath))
+
+    if (store.markdown.has(docsPath) && store.coreDocs.has(docsPath)) {
+      store.markdown.delete(docsPath)
+      store.coreDocs.delete(docsPath)
+
+      const adjacentDocs = store.dirtyDocMap.get(docsPath)
+
+      if (adjacentDocs && invalidateAdjacentDocs) {
+        const invalidate = invalidateFile(store, config)
+
+        adjacentDocs.forEach((docPath) => {
+          invalidate(docPath, false)
+        })
+      }
+    }

     store.partials.delete(path.relative(config.partialsPath, filePath))
     store.typedocs.delete(path.relative(config.typedocPath, filePath))
   }

+export const markDocumentDirty =
+  (store: ReturnType<typeof createBlankStore>) => (filePath: string, adjustedByFilePath: string) => {
+    const dirtyDocs = store.dirtyDocMap.get(adjustedByFilePath) ?? new Set<string>()
+    dirtyDocs.add(filePath)
+    store.dirtyDocMap.set(adjustedByFilePath, dirtyDocs)
+  }
+
 export const getMarkdownCache = (store: Store) => {
   return async (key: string, cacheMiss: (key: string) => Promise<MarkdownFile>) => {
     const cached = store.markdown.get(key)
@@ -44,6 +72,17 @@ export const getMarkdownCache = (store: Store) => {
   }
 }

+export const getCoreDocCache = (store: Store) => {
+  return async (key: string, cacheMiss: (key: string) => Promise<CoreDocsFile>) => {
+    const cached = store.coreDocs.get(key)
+    if (cached) return structuredClone(cached)
+
+    const result = await cacheMiss(key)
+    store.coreDocs.set(key, structuredClone(result))
+    return result
+  }
+}
+
 export const getPartialsCache = (store: Store) => {
   return async (key: string, cacheMiss: (key: string) => Promise<PartialsFile>) => {
     const cached = store.partials.get(key)
diff --git a/scripts/lib/typedoc.ts b/scripts/lib/typedoc.ts
index c2f3acf1f0..7354df9384 100644
--- a/scripts/lib/typedoc.ts
+++ b/scripts/lib/typedoc.ts
@@ -24,11 +24,9 @@ export const readTypedocsFolder = (config: BuildConfig) => async () => {
 }

 export const readTypedoc = (config: BuildConfig) => async (filePath: string) => {
-  const readFile = readMarkdownFile(config)
+  const typedocPath = path.join(config.typedocPath, filePath)

-  const typedocPath = path.join(config.typedocRelativePath, filePath)
-
-  const [error, content] = await readFile(typedocPath)
+  const [error, content] = await readMarkdownFile(typedocPath)

   if (error) {
     throw new Error(errorMessages['typedoc-read-error'](typedocPath), { cause: error })
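
A minimal sketch of how the new pieces in scripts/lib/store.ts are intended to compose at runtime; the config value and the document paths below are assumed for illustration and are not taken from this diff.

import type { BuildConfig } from './scripts/lib/config'
import { createBlankStore, invalidateFile, markDocumentDirty } from './scripts/lib/store'

// Assumption: a config produced by createConfig(...), as in the tests above.
declare const config: BuildConfig

const store = createBlankStore()
const markDirty = markDocumentDirty(store)

// During build(), each core doc records every file it links to or embeds
// (direct links, plus links inside embedded partials and typedocs). Here
// "/docs/cached-doc.mdx" depends on "/docs/linked-doc.mdx" (hypothetical paths):
markDirty('/docs/cached-doc.mdx', '/docs/linked-doc.mdx')

// When the watcher reports a change to linked-doc.mdx, invalidateFile evicts it
// from the markdown and coreDocs caches and then walks dirtyDocMap one level
// deep (adjacent docs are re-invalidated with invalidateAdjacentDocs = false),
// so the next build() re-processes both documents instead of reusing stale
// cache entries. The path is resolved relative to config.docsPath.
const invalidate = invalidateFile(store, config)
invalidate('/abs/repo/docs/linked-doc.mdx')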