Skip to content

feat: support nested docs #275

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 8 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
90 changes: 58 additions & 32 deletions scripts/fetch-docs-local.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,62 @@ async function getHeadings(source) {
})
}

/**
 * Recursively parse the docs inside a topic directory, descending into
 * sub-directories to build nested sub-topics.
 *
 * @param {string[]} fileNames - entries (files or directories) inside `topicDirectory`
 * @param {string[]} topicSlugs - slugs of all ancestor topics; joined to build each doc's `path`
 * @param {string} topicDirectory - filesystem path of the directory being parsed
 * @returns entries are parsed docs or sub-topic objects; entries that failed to
 *          parse are `undefined`, so callers should `.filter(Boolean)` the result
 */
export async function parseDocs(fileNames, topicSlugs, topicDirectory) {
  const parsedDocs = await Promise.all(
    fileNames.map(async docFilename => {
      try {
        const path = `${topicDirectory}/${docFilename}`

        if (fs.lstatSync(path).isDirectory()) {
          const subDocSlugs = fs.readdirSync(path)

          // Recurse into the sub-directory, extending the ancestor slug chain.
          const subDocs = await parseDocs(subDocSlugs, topicSlugs.concat(docFilename), path)

          // NOTE(review): sub-topics carry no `order` field, so a caller sorting a
          // mixed list with `a.order - b.order` gets NaN for them — confirm the
          // intended placement of nested topics among sibling docs.
          const subTopic = {
            slug: docFilename,
            path: topicSlugs.concat(docFilename).join('/') + '/',
            docs: subDocs.filter(Boolean).sort((a, b) => a.order - b.order),
          }
          return subTopic
        } else {
          const rawDoc = fs.readFileSync(path, 'utf8')

          // Split YAML frontmatter (title, label, order, …) from the MDX body.
          const parsedDoc = matter(rawDoc)

          const doc = {
            content: await serialize(parsedDoc.content, {
              mdxOptions: {
                remarkPlugins: [remarkGfm],
              },
            }),
            title: parsedDoc.data.title,
            // Anchor the extension so only a trailing '.mdx' is stripped, not the
            // first '.mdx' occurring anywhere in the filename.
            slug: docFilename.replace(/\.mdx$/, ''),
            path: topicSlugs.join('/') + '/',
            label: parsedDoc.data.label,
            order: parsedDoc.data.order,
            desc: parsedDoc.data.desc || '',
            keywords: parsedDoc.data.keywords || '',
            headings: await getHeadings(parsedDoc.content),
          }

          return doc
        }
      } catch (error) {
        // Best-effort: log and continue; the failed entry becomes `undefined`.
        const msg = error instanceof Error ? error.message : error || 'Unknown error'
        console.error(`Error fetching ${docFilename}: ${msg}`) // eslint-disable-line no-console
      }
    }),
  )
  return parsedDocs
}

const fetchDocs = async () => {
const topics = await Promise.all(
topicOrder.map(async unsanitizedTopicSlug => {
Expand All @@ -80,41 +136,11 @@ const fetchDocs = async () => {
const topicDirectory = path.join(docsDirectory, `./${topicSlug}`)
const docSlugs = fs.readdirSync(topicDirectory)

const parsedDocs = await Promise.all(
docSlugs.map(async docFilename => {
try {
const rawDoc = fs.readFileSync(
`${docsDirectory}/${topicSlug.toLowerCase()}/${docFilename}`,
'utf8',
)

const parsedDoc = matter(rawDoc)

const doc = {
content: await serialize(parsedDoc.content, {
mdxOptions: {
remarkPlugins: [remarkGfm],
},
}),
title: parsedDoc.data.title,
slug: docFilename.replace('.mdx', ''),
label: parsedDoc.data.label,
order: parsedDoc.data.order,
desc: parsedDoc.data.desc || '',
keywords: parsedDoc.data.keywords || '',
headings: await getHeadings(parsedDoc.content),
}

return doc
} catch (error) {
const msg = err instanceof Error ? err.message : err || 'Unknown error'
console.error(`Error fetching ${docFilename}: ${msg}`) // eslint-disable-line no-console
}
}),
)
const parsedDocs = await parseDocs(docSlugs, [topicSlug], `${docsDirectory}/${topicSlug}`)

const topic = {
slug: unsanitizedTopicSlug,
path: '/',
docs: parsedDocs.filter(Boolean).sort((a, b) => a.order - b.order),
}

Expand Down
100 changes: 67 additions & 33 deletions scripts/fetch-docs.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ function slugify(string) {
}

const githubAPI = 'https://api.github.com/repos/payloadcms/payload'
const branch = 'main'

const topicOrder = [
'Getting-Started',
Expand Down Expand Up @@ -78,6 +79,66 @@ async function getHeadings(source) {
})
}

/**
 * Recursively fetch and parse the docs for a topic from the GitHub contents
 * API, descending into sub-directories to build nested sub-topics.
 *
 * @param {string[]} docFilenames - entry names (files or directories) under `topicURL`
 * @param {string[]} topicSlugs - slugs of all ancestor topics; joined to build each doc's `path`
 * @param {string} topicURL - GitHub contents-API URL of the directory being parsed
 * @returns entries are parsed docs or sub-topic objects; entries that failed to
 *          parse are `undefined`, so callers should `.filter(Boolean)` the result
 */
export async function parseDocs(docFilenames, topicSlugs, topicURL) {
  const parsedDocs = await Promise.all(
    docFilenames.map(async docFilename => {
      try {
        const path = `${topicURL}/${docFilename}`
        // Directory entries have no markdown extension. Anchor the check so a
        // directory whose name merely *contains* ".md" is not mistaken for a file.
        const isDirectory = !/\.mdx?$/.test(docFilename)
        if (isDirectory) {
          const dirRes = await fetch(`${path}?ref=${branch}`, { headers })
          // Surface API failures explicitly instead of letting the error JSON
          // flow into the doc parser with a confusing downstream message.
          if (!dirRes.ok) throw new Error(`GitHub API responded ${dirRes.status} for ${path}`)
          const subDocs = await dirRes.json()
          const subDocFilenames = subDocs.map(({ name }) => name)

          // Recurse into the sub-directory, extending the ancestor slug chain.
          const parsedSubDocs = await parseDocs(
            subDocFilenames,
            topicSlugs.concat(docFilename),
            path,
          )

          const subTopic = {
            slug: docFilename,
            path: topicSlugs.concat(docFilename).join('/') + '/',
            docs: parsedSubDocs.filter(Boolean).sort((a, b) => a.order - b.order),
          }
          return subTopic
        } else {
          const fileRes = await fetch(`${path}?ref=${branch}`, { headers })
          if (!fileRes.ok) throw new Error(`GitHub API responded ${fileRes.status} for ${path}`)
          const json = await fileRes.json()

          // The contents API returns the file base64-encoded; decode, then split
          // YAML frontmatter (title, label, order, …) from the MDX body.
          const parsedDoc = matter(decodeBase64(json.content))

          const doc = {
            content: await serialize(parsedDoc.content, {
              mdxOptions: {
                remarkPlugins: [remarkGfm],
              },
            }),
            title: parsedDoc.data.title,
            // Anchor the extension so only a trailing '.mdx' is stripped.
            slug: docFilename.replace(/\.mdx$/, ''),
            path: topicSlugs.join('/') + '/',
            label: parsedDoc.data.label,
            order: parsedDoc.data.order,
            desc: parsedDoc.data.desc || '',
            keywords: parsedDoc.data.keywords || '',
            headings: await getHeadings(parsedDoc.content),
          }

          return doc
        }
      } catch (error) {
        // Best-effort: log and continue; the failed entry becomes `undefined`.
        const msg = error instanceof Error ? error.message : error || 'Unknown error'
        console.error(`Error fetching ${docFilename}: ${msg}`) // eslint-disable-line no-console
      }
    }),
  )
  return parsedDocs
}

const fetchDocs = async () => {
if (!process.env.GITHUB_ACCESS_TOKEN) {
console.log('No GitHub access token found - skipping docs retrieval') // eslint-disable-line no-console
Expand All @@ -88,53 +149,26 @@ const fetchDocs = async () => {
topicOrder.map(async unsanitizedTopicSlug => {
const topicSlug = unsanitizedTopicSlug.toLowerCase()

const docs = await fetch(`${githubAPI}/contents/docs/${topicSlug}`, {
const docs = await fetch(`${githubAPI}/contents/docs/${topicSlug}?ref=${branch}`, {
headers,
}).then(res => res.json())

const docFilenames = docs.map(({ name }) => name)

const parsedDocs = await Promise.all(
docFilenames.map(async docFilename => {
try {
const json = await fetch(`${githubAPI}/contents/docs/${topicSlug}/${docFilename}`, {
headers,
}).then(res => res.json())

const parsedDoc = matter(decodeBase64(json.content))

const doc = {
content: await serialize(parsedDoc.content, {
mdxOptions: {
remarkPlugins: [remarkGfm],
},
}),
title: parsedDoc.data.title,
slug: docFilename.replace('.mdx', ''),
label: parsedDoc.data.label,
order: parsedDoc.data.order,
desc: parsedDoc.data.desc || '',
keywords: parsedDoc.data.keywords || '',
headings: await getHeadings(parsedDoc.content),
}

return doc
} catch (err) {
const msg = err instanceof Error ? err.message : err || 'Unknown error'
console.error(`Error fetching ${docFilename}: ${msg}`) // eslint-disable-line no-console
}
}),
)
const topicURL = `${githubAPI}/contents/docs/${topicSlug}`

const parsedDocs = await parseDocs(docFilenames, [topicSlug], topicURL)

const topic = {
slug: unsanitizedTopicSlug,
path: '/',
docs: parsedDocs.filter(Boolean).sort((a, b) => a.order - b.order),
}

return topic
}),
)


const data = JSON.stringify(topics, null, 2)

const docsFilename = path.resolve(__dirname, './src/app/docs.json')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,9 @@ import { NextDoc } from '../../types'
import { RenderDoc } from './client_page'

const Doc = async ({ params }) => {
const { topic, doc: docSlug } = params
const doc = await getDoc({ topic, doc: docSlug })
const { topic, doc: docSlugs } = params
const docPathWithSlug = topic + '/' + docSlugs.join('/')
const doc = await getDoc({ topic, doc: docPathWithSlug })
const topics = await getTopics()

const relatedThreads = await fetchRelatedThreads()
Expand All @@ -29,7 +30,9 @@ const Doc = async ({ params }) => {
let next: NextDoc | null = null

if (parentTopic) {
const docIndex = parentTopic?.docs.findIndex(({ slug }) => slug === docSlug)
const docIndex = parentTopic?.docs.findIndex(
({ path, slug }) => path + slug === docPathWithSlug,
)

if (parentTopic?.docs?.[docIndex + 1]) {
next = {
Expand Down Expand Up @@ -57,41 +60,44 @@ export default Doc

type Param = {
topic: string
doc: string
doc: string[]
}

export async function generateStaticParams() {
if (process.env.NEXT_PUBLIC_SKIP_BUILD_DOCS) return []

const topics = await getTopics()

function extractParams(docs, topicSlug: string, parentSlugs: string[] = []): Param[] {
return docs.flatMap(doc => {
// If doc has subdocs, recursively call extractParams
if (doc.docs && doc.docs.length > 0) {
return extractParams(doc.docs, topicSlug, [...parentSlugs, doc.slug])
} else if (doc.slug) {
// If there are no subdocs, add the doc (including parent slugs if any)
return [{ topic: topicSlug.toLowerCase(), doc: [...parentSlugs, doc.slug] }]
}
return [] // If doc has no slug, return an empty array to avoid null values
})
}

const result = topics.reduce((params: Param[], topic) => {
return params.concat(
topic.docs
.map(doc => {
if (!doc.slug) return null as any

return {
topic: topic.slug.toLowerCase(),
doc: doc.slug,
}
})
.filter(Boolean),
)
}, [])
const topicParams = extractParams(topic.docs, topic.slug)
return params.concat(topicParams)
}, [] as Param[])

return result
}

export async function generateMetadata({ params: { topic: topicSlug, doc: docSlug } }) {
const doc = await getDoc({ topic: topicSlug, doc: docSlug })
export async function generateMetadata({ params: { topic: topicSlug, doc: docSlugs } }) {
const docPathWithSlug = topicSlug + docSlugs.join('/')
const doc = await getDoc({ topic: topicSlug, doc: docPathWithSlug })

return {
title: `${doc?.title ? `${doc.title} | ` : ''}Documentation | Payload CMS`,
description: doc?.desc || `Payload CMS ${topicSlug} Documentation`,
openGraph: mergeOpenGraph({
title: `${doc?.title ? `${doc.title} | ` : ''}Documentation | Payload CMS`,
url: `/docs/${topicSlug}/${docSlug}`,
url: `/docs/${docPathWithSlug}`,
images: [
{
url: `/api/og?topic=${topicSlug}&title=${doc?.title}`,
Expand Down
40 changes: 35 additions & 5 deletions src/app/(pages)/docs/api.ts
Original file line number Diff line number Diff line change
@@ -1,20 +1,50 @@
import content from '../../docs.json'
import type { Doc, DocPath, Topic } from './types'
import type { Doc, DocMeta, DocOrTopic, DocPath, Topic } from './types'

/**
 * Map the raw docs.json content into the Topic shape used by the docs pages,
 * filling in defaults for any missing fields.
 */
export async function getTopics(): Promise<Topic[]> {
  // Normalize one raw doc entry; `docs` is only present on nested sub-topics,
  // hence the cast — leaf docs get `null`.
  const toDocMeta = rawDoc => ({
    title: rawDoc?.title || '',
    label: rawDoc?.label || '',
    slug: rawDoc?.slug || '',
    order: rawDoc?.order || 0,
    docs: ((rawDoc as any)?.docs as DocMeta[]) || null,
    path: rawDoc?.path || '/',
  })

  return content.map(rawTopic => ({
    slug: rawTopic.slug,
    path: rawTopic.path || '/',
    docs: rawTopic.docs.map(toDocMeta),
  }))
}

export async function getDoc({ topic: topicSlug, doc: docSlug }: DocPath): Promise<Doc | null> {
const matchedTopic = content.find(topic => topic.slug.toLowerCase() === topicSlug)
const matchedDoc = matchedTopic?.docs?.find(doc => doc?.slug === docSlug) || null
return matchedDoc
/**
 * Look up a single doc by topic slug and its slash-joined `path + slug`,
 * searching nested sub-topics recursively. Matching is case-insensitive.
 *
 * @param topic - the topic slug (compared case-insensitively)
 * @param doc - the doc's full path-with-slug, e.g. `getting-started/what-is-payload`
 * @returns the matched doc, or `null` when the topic or doc is not found
 */
export async function getDoc({
  topic: topicSlug,
  doc: docPathWithSlug,
}: DocPath): Promise<Doc | null> {
  // Find the matched topic first.
  const matchedTopic = content.find(topic => topic.slug.toLowerCase() === topicSlug.toLowerCase())

  // If there's no matched topic, return null early.
  if (!matchedTopic) return null

  // Recursive search for a leaf doc whose `path + slug` equals the target.
  // `pathAndSlug` is always lowercase (lowered once at the top-level call).
  function findDoc(docs: DocOrTopic[], pathAndSlug: string): Doc | null {
    for (const doc of docs) {
      // Lowercase the candidate too: stored paths/slugs may contain uppercase
      // (e.g. 'Getting-Started'), and a case-sensitive comparison against the
      // lowercased target could never match those entries.
      if (
        doc &&
        ((doc.path || '/') + (doc.slug || '/')).toLowerCase() === pathAndSlug &&
        !('docs' in doc)
      ) {
        return doc
      }
      // If the current entry is a sub-topic, search within it recursively.
      if ('docs' in doc && doc?.docs && doc.docs.length > 0) {
        const subDoc = findDoc(doc.docs, pathAndSlug)
        if (subDoc) {
          return subDoc
        }
      }
    }
    // No doc matches at this level.
    return null
  }

  return findDoc(matchedTopic.docs || [], docPathWithSlug.toLowerCase())
}
Loading