
Convert all JavaScript files to TypeScript in src/automated-pipelines directory (#55861)

Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>
Co-authored-by: heiskr <1221423+heiskr@users.noreply.github.com>
Co-authored-by: Ebonsignori <17055832+Ebonsignori@users.noreply.github.com>
Co-authored-by: Evan Bonsignori <ebonsignori@github.com>
Authored by Copilot on 2025-06-02 17:50:10 +00:00 and committed by GitHub.
Parent 40551f3718, commit f8fcee0c3d.
6 changed files with 179 additions and 57 deletions

View File

@@ -21,13 +21,15 @@ Automated pages allow for manually created content to be prepended to the automa
## How does it work
We currently have two patterns that we use to create automated pipelines:
- REST, Webhooks, GitHub Apps, and GraphQL pipelines consume external structured data and transform that data into a JSON file that is used to create content for a page on docs.github.com. Typically, data files are a 1:1 mapping to a specific page on docs.github.com.
- The CodeQL CLI pipeline takes an unstructured ReStructuredText file and transforms it directly into a Markdown file with frontmatter that uses the same authoring format as the rest of the docs.
## Creating a new pipeline
Each pipeline should be evaluated individually to determine the best architecture for simplicity, maintainability, and requirements.
For example:
- Is the content being displayed basic Markdown content? For example, does the content avoid using complex tables and interactive elements? If so, then writing the Markdown content directly and avoiding the need to create a structured data file that requires a React component may be the best approach. This was the case for the CodeQL CLI pipeline. One caveat to think about before writing Markdown directly is whether the content will need Liquid versioning, which would increase the complexity quite a bit. The current pipeline that writes Markdown directly does not need it: all of the Markdown content in each article generated from the CodeQL CLI pipeline applies to all versions listed in the `versions` frontmatter property, simplifying the Markdown generation process. (A minimal sketch of such a directly written file follows below.)
- Is the page interactive like the REST and Webhooks pages? If so, then the data will likely need to be structured data. In that case, a new React component may be needed to display the data.
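To ground the Markdown-direct pattern described above, here is a minimal sketch of what a pipeline in that style could emit, using gray-matter (which the library code in this commit already imports) to prepend frontmatter to a generated body. The target path, title, `versions` values, and `autogenerated` value are hypothetical placeholders, not taken from the real CodeQL CLI pipeline.

```typescript
import { writeFile } from 'fs/promises'
import matter from 'gray-matter'

// Hypothetical output of a pipeline that writes Markdown directly:
// a plain Markdown body plus frontmatter, with no structured data file
// or React component involved.
const frontmatter = {
  title: 'Example command', // placeholder title
  versions: { fpt: '*', ghec: '*', ghes: '*' }, // applies to every listed version
  autogenerated: 'codeql-cli', // placeholder marker identifying the owning pipeline
}
const body = '\n## Synopsis\n\nGenerated reference content goes here.\n'

// gray-matter serializes the object as YAML frontmatter ahead of the body.
await writeFile('content/example-command.md', matter.stringify(body, frontmatter), 'utf-8')
```

Because the whole body applies to every version in `versions`, no Liquid conditionals are needed, which is the simplification the first bullet describes.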

View File

@@ -6,4 +6,4 @@
"rest", "rest",
"webhooks" "webhooks"
] ]
} }

View File

@@ -9,6 +9,57 @@ import { difference, isEqual } from 'lodash-es'
import { allVersions } from '#src/versions/lib/all-versions.js'
import getApplicableVersions from '#src/versions/lib/get-applicable-versions.js'
+import type { MarkdownFrontmatter } from '@/types'
+// Type definitions - extending existing type to add missing fields and make most fields optional
+type FrontmatterData = Partial<MarkdownFrontmatter> & {
+  autogenerated?: string
+  [key: string]: any
+}
+type SourceContentItem = {
+  data: FrontmatterData
+  content: string
+}
+type SourceContent = {
+  [key: string]: SourceContentItem
+}
+type IndexOrder = {
+  [key: string]: {
+    startsWith?: string[]
+  }
+}
+type UpdateContentDirectoryOptions = {
+  targetDirectory: string
+  sourceContent: SourceContent
+  frontmatter: FrontmatterData
+  indexOrder?: IndexOrder
+}
+type UpdateDirectoryOptions = {
+  rootDirectoryOnly?: boolean
+  shortTitle?: boolean
+  indexOrder?: IndexOrder
+}
+type ChildUpdates = {
+  itemsToAdd: string[]
+  itemsToRemove: string[]
+}
+type DirectoryInfo = {
+  directoryContents: string[]
+  directoryFiles: string[]
+  childDirectories: string[]
+}
+type ChildrenComparison = {
+  childrenOnDisk: string[]
+  indexChildren: string[]
+}
const ROOT_INDEX_FILE = 'content/index.md'
export const MARKDOWN_COMMENT = '\n<!-- Content after this section is automatically generated -->\n'
@@ -20,7 +71,7 @@ export async function updateContentDirectory({
  sourceContent,
  frontmatter,
  indexOrder,
-}) {
+}: UpdateContentDirectoryOptions): Promise<void> {
  const sourceFiles = Object.keys(sourceContent)
  await createDirectory(targetDirectory)
  await removeMarkdownFiles(targetDirectory, sourceFiles, frontmatter.autogenerated)
@@ -28,7 +79,11 @@ export async function updateContentDirectory({
}
// Remove markdown files that are no longer in the source data
-async function removeMarkdownFiles(targetDirectory, sourceFiles, autogeneratedType) {
+async function removeMarkdownFiles(
+  targetDirectory: string,
+  sourceFiles: string[],
+  autogeneratedType: string | undefined,
+): Promise<void> {
  // Copy the autogenerated Markdown files to the target directory
  const autogeneratedFiles = await getAutogeneratedFiles(targetDirectory, autogeneratedType)
  // If the first array contains items that the second array does not,
@@ -42,29 +97,37 @@ async function removeMarkdownFiles(targetDirectory, sourceFiles, autogeneratedTy
// Gets a list of all files under targetDirectory that have the
// `autogenerated` frontmatter set to `autogeneratedType`.
-async function getAutogeneratedFiles(targetDirectory, autogeneratedType) {
+async function getAutogeneratedFiles(
+  targetDirectory: string,
+  autogeneratedType: string | undefined,
+): Promise<string[]> {
  const files = walk(targetDirectory, {
    includeBasePath: true,
-    childDirectories: false,
+    directories: false,
    globs: ['**/*.md'],
    ignore: ['**/README.md', '**/index.md'],
  })
  return (
    await Promise.all(
-      files.map(async (file) => {
+      files.map(async (file: string) => {
        const { data } = matter(await readFile(file, 'utf-8'))
        if (data.autogenerated === autogeneratedType) {
          return file
        }
      }),
    )
-  ).filter(Boolean)
+  ).filter(Boolean) as string[]
}
// The `sourceContent` object contains the new content and target file
// path for the Markdown files. Ex:
// { <targetFile>: { data: <frontmatter>, content: <markdownContent> } }
-async function updateMarkdownFiles(targetDirectory, sourceContent, frontmatter, indexOrder = {}) {
+async function updateMarkdownFiles(
+  targetDirectory: string,
+  sourceContent: SourceContent,
+  frontmatter: FrontmatterData,
+  indexOrder: IndexOrder = {},
+): Promise<void> {
  for (const [file, newContent] of Object.entries(sourceContent)) {
    await updateMarkdownFile(file, newContent.data, newContent.content)
  }
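As a reading aid for `updateContentDirectory` and the `sourceContent` mapping documented in the comment above, here is a minimal usage sketch. It follows the `{ <targetFile>: { data, content } }` shape and the option names from `UpdateContentDirectoryOptions`; the concrete paths, title, and versions are illustrative assumptions modeled on the test fixture later in this commit.

```typescript
import { updateContentDirectory } from '../lib/update-markdown.js' // path as used by the tests in this commit

// Illustrative frontmatter defaults; `autogenerated` marks files as pipeline-owned
// so removeMarkdownFiles can delete stale ones on the next run.
const frontmatter = { autogenerated: 'rest' }

// One entry per target file: frontmatter data plus the generated Markdown body.
const sourceContent = {
  'content/rest/actions/secrets.md': {
    data: { title: 'Secrets', versions: { fpt: '*', ghec: '*', ghes: '*' }, autogenerated: 'rest' },
    content: '\nGenerated reference content goes here.\n',
  },
}

await updateContentDirectory({
  targetDirectory: 'content/rest',
  sourceContent,
  frontmatter,
  // Optional: pin how specific children are ordered in generated index.md files.
  indexOrder: { 'content/rest/index.md': { startsWith: ['overview', 'guides'] } },
})
```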
@@ -82,11 +145,11 @@ async function updateMarkdownFiles(targetDirectory, sourceContent, frontmatter,
// edit the modifiable content of the file. If the Markdown file doesn't
// exists, we create a new Markdown file.
async function updateMarkdownFile(
-  file,
-  sourceData,
-  sourceContent,
-  commentDelimiter = MARKDOWN_COMMENT,
-) {
+  file: string,
+  sourceData: FrontmatterData,
+  sourceContent: string,
+  commentDelimiter: string = MARKDOWN_COMMENT,
+): Promise<void> {
  if (existsSync(file)) {
    // update only the versions property of the file, assuming
    // the other properties have already been added and edited
@@ -132,10 +195,10 @@ async function updateMarkdownFile(
// ensure that the Markdown files have been updated and any files
// that need to be deleted have been removed.
async function updateDirectory(
-  directory,
-  frontmatter,
-  { rootDirectoryOnly = false, shortTitle = false, indexOrder = {} } = {},
-) {
+  directory: string,
+  frontmatter: FrontmatterData,
+  { rootDirectoryOnly = false, shortTitle = false, indexOrder = {} }: UpdateDirectoryOptions = {},
+): Promise<void> {
  const initialDirectoryListing = await getDirectoryInfo(directory)
  // If there are no children on disk, remove the directory
  if (initialDirectoryListing.directoryContents.length === 0 && !rootDirectoryOnly) {
@@ -162,7 +225,7 @@ async function updateDirectory(
  const { childrenOnDisk, indexChildren } = getChildrenToCompare(
    indexFile,
    directoryContents,
-    data.children,
+    data.children || [],
  )
  const itemsToAdd = difference(childrenOnDisk, indexChildren)
@@ -199,12 +262,16 @@ async function updateDirectory(
// Children properties include a leading slash except when the
// index.md file is the root index.md file. We also want to
// remove the file extension from the files on disk.
-function getChildrenToCompare(indexFile, directoryContents, fmChildren) {
+function getChildrenToCompare(
+  indexFile: string,
+  directoryContents: string[],
+  fmChildren: string[] | undefined,
+): ChildrenComparison {
  if (!fmChildren) {
    throw new Error(`No children property found in ${indexFile}`)
  }
-  const isEarlyAccess = (item) => isRootIndexFile(indexFile) && item === 'early-access'
+  const isEarlyAccess = (item: string) => isRootIndexFile(indexFile) && item === 'early-access'
  // Get the list of children from the directory contents
  const childrenOnDisk = directoryContents
@@ -233,18 +300,24 @@ function getChildrenToCompare(indexFile, directoryContents, fmChildren) {
//
// 3. If the index file is not autogenerated, we leave the ordering
// as is and append new children to the end.
-function updateIndexChildren(data, childUpdates, indexFile, indexOrder, rootIndex = false) {
+function updateIndexChildren(
+  data: FrontmatterData,
+  childUpdates: ChildUpdates,
+  indexFile: string,
+  indexOrder: IndexOrder,
+  rootIndex: boolean = false,
+): FrontmatterData {
  const { itemsToAdd, itemsToRemove } = childUpdates
  const childPrefix = rootIndex ? '' : '/'
  // Get a new list of children with added and removed items
-  const children = [...data.children]
+  const children = [...(data.children || [])]
    // remove the '/' prefix used in index.md children
    .map((item) => item.replace(childPrefix, ''))
    .filter((item) => !itemsToRemove.includes(item))
  children.push(...itemsToAdd)
-  const orderedIndexChildren = []
+  const orderedIndexChildren: string[] = []
  // Only used for tests. During testing, the content directory is
  // in a temp directory so the paths are not relative to
@@ -280,7 +353,11 @@ function updateIndexChildren(data, childUpdates, indexFile, indexOrder, rootInde
// Gets the contents of the index.md file from disk if it exits or
// creates a new index.md file with the default frontmatter.
-async function getIndexFileContents(indexFile, frontmatter, shortTitle = false) {
+async function getIndexFileContents(
+  indexFile: string,
+  frontmatter: FrontmatterData,
+  shortTitle: boolean = false,
+): Promise<{ data: FrontmatterData; content: string }> {
  const directory = path.dirname(indexFile)
  const indexFileContent = {
    data: {
@@ -300,8 +377,11 @@ async function getIndexFileContents(indexFile, frontmatter, shortTitle = false)
// Builds the index.md versions frontmatter by consolidating
// the versions from each Markdown file in the directory + the
// index.md files in any subdirectories of directory.
-async function getIndexFileVersions(directory, files) {
-  const versions = new Set()
+async function getIndexFileVersions(
+  directory: string,
+  files: string[],
+): Promise<{ [key: string]: string }> {
+  const versions = new Set<string>()
  await Promise.all(
    files.map(async (file) => {
      const filepath = path.join(directory, file)
@@ -319,7 +399,7 @@ async function getIndexFileVersions(directory, files) {
        throw new Error(`Frontmatter in ${filepath} does not contain versions.`)
      }
      const fmVersions = getApplicableVersions(data.versions)
-      fmVersions.forEach((version) => versions.add(version))
+      fmVersions.forEach((version: string) => versions.add(version))
    }),
  )
  const versionArray = [...versions]
@@ -343,9 +423,11 @@ and returns the frontmatter equivalent JSON:
  ghes: '*'
}
*/
-export async function convertVersionsToFrontmatter(versions) {
-  const frontmatterVersions = {}
-  const numberedReleases = {}
+export async function convertVersionsToFrontmatter(
+  versions: string[],
+): Promise<{ [key: string]: string }> {
+  const frontmatterVersions: { [key: string]: string } = {}
+  const numberedReleases: { [key: string]: { availableReleases: (string | undefined)[] } } = {}
  // Currently, only GHES is numbered. Number releases have to be
  // handled differently because they use semantic versioning.
@@ -362,7 +444,9 @@ export async function convertVersionsToFrontmatter(versions) {
    // a release is no longer supported.
    const i = docsVersion.releases.indexOf(docsVersion.currentRelease)
    if (!numberedReleases[docsVersion.shortName]) {
-      const availableReleases = Array(docsVersion.releases.length).fill(undefined)
+      const availableReleases: (string | undefined)[] = Array(docsVersion.releases.length).fill(
+        undefined,
+      )
      availableReleases[i] = docsVersion.currentRelease
      numberedReleases[docsVersion.shortName] = {
        availableReleases,
@@ -388,7 +472,7 @@ export async function convertVersionsToFrontmatter(versions) {
        .join(' || ')
      frontmatterVersions[key] = semVer
    } else {
-      const semVer = []
+      const semVer: string[] = []
      if (!availableReleases[availableReleases.length - 1]) {
        const startVersion = availableReleases.filter(Boolean).pop()
        semVer.push(`>=${startVersion}`)
@@ -402,7 +486,7 @@ export async function convertVersionsToFrontmatter(versions) {
  })
  const sortedFrontmatterVersions = Object.keys(frontmatterVersions)
    .sort()
-    .reduce((acc, key) => {
+    .reduce((acc: { [key: string]: string }, key) => {
      acc[key] = frontmatterVersions[key]
      return acc
    }, {})
@@ -412,7 +496,7 @@ export async function convertVersionsToFrontmatter(versions) {
// This is uncommon, but we potentially could have the case where an
// article was versioned for say 3.2, not for 3.3, and then again
// versioned for 3.4. This will result in a custom semantic version range
-function checkVersionContinuity(versions) {
+function checkVersionContinuity(versions: (string | undefined)[]): boolean {
  const availableVersions = [...versions]
  // values at the beginning or end of the array are not gaps but normal
@@ -427,18 +511,18 @@ function checkVersionContinuity(versions) {
}
// Returns true if the indexFile is the root index.md file
-function isRootIndexFile(indexFile) {
+function isRootIndexFile(indexFile: string): boolean {
  return indexFile === ROOT_INDEX_FILE
}
// Creates a new directory if it doesn't exist
-async function createDirectory(targetDirectory) {
+async function createDirectory(targetDirectory: string): Promise<void> {
  if (!existsSync(targetDirectory)) {
    await mkdirp(targetDirectory)
  }
}
-async function getDirectoryInfo(directory) {
+async function getDirectoryInfo(directory: string): Promise<DirectoryInfo> {
  if (!existsSync(directory)) {
    throw new Error(`Directory ${directory} did not exist when attempting to get directory info.`)
  }
@@ -454,7 +538,7 @@ async function getDirectoryInfo(directory) {
  return { directoryContents, directoryFiles, childDirectories }
}
-function appendVersionComment(stringifiedContent) {
+function appendVersionComment(stringifiedContent: string): string {
  return stringifiedContent.replace(
    '\nversions:\n',
    `\nversions: # DO NOT MANUALLY EDIT. CHANGES WILL BE OVERWRITTEN BY A 🤖\n`,
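For orientation, here is a hedged sketch of calling the exported `convertVersionsToFrontmatter` helper shown above. The input identifiers assume the `<plan>@<release>` naming enumerated by `#src/versions/lib/all-versions.js`; the exact GHES output depends on which releases are currently supported, so no specific result is asserted.

```typescript
import { convertVersionsToFrontmatter } from '../lib/update-markdown.js'

// Hypothetical input: the full docs version identifiers a page applies to.
const frontmatterVersions = await convertVersionsToFrontmatter([
  'free-pro-team@latest', // assumed identifier behind the `fpt` short name
  'enterprise-cloud@latest', // assumed identifier behind the `ghec` short name
  'enterprise-server@3.13', // assumed identifier for a single numbered GHES release
])

// Non-numbered plans collapse to '*'. GHES collapses to '*' only when every
// supported release is present; otherwise it becomes a semver-style range
// built from '>='/'<=' bounds or individual releases joined with ' || '.
console.log(frontmatterVersions) // e.g. { fpt: '*', ghec: '*', ghes: <semver range> }
```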

View File

@@ -6,20 +6,31 @@ import { describe, expect, test, vi } from 'vitest'
import { loadPages } from '#src/frame/lib/page-data.js'
import { get } from '#src/tests/helpers/e2etest.js'
+// Type definitions for page objects
+type Page = {
+  autogenerated?: boolean
+  fullPath: string
+  permalinks: Array<{ href: string }>
+  versions: {
+    feature?: any
+    [key: string]: any
+  }
+}
// Get a list of the autogenerated pages
-const pageList = await loadPages(undefined, ['en'])
+const pageList: Page[] = await loadPages(undefined, ['en'])
describe('autogenerated docs render', () => {
  vi.setConfig({ testTimeout: 3 * 60 * 1000 })
-  const autogeneratedPages = pageList.filter((page) => page.autogenerated)
+  const autogeneratedPages = pageList.filter((page: Page) => page.autogenerated)
  test('all automated pages', async () => {
    // Each page should render with 200 OK. Also, check for duplicate
    // heading IDs on each page.
    const errors = (
      await Promise.all(
-        autogeneratedPages.map(async (page) => {
+        autogeneratedPages.map(async (page: Page) => {
          const url = page.permalinks[0].href
          // Some autogenerated pages can be very slow and might fail.
          // So we allow a few retries to avoid false positives.
@@ -34,21 +45,25 @@ describe('autogenerated docs render', () => {
            .map((_, el) => $(el).attr('id'))
            .get()
            .sort()
-          const dupes = headingIDs.filter((item, index) => headingIDs.indexOf(item) !== index)
+          const dupes = headingIDs.filter(
+            (item: string, index: number) => headingIDs.indexOf(item) !== index,
+          )
          if (dupes.length) {
            return `In ${url}, the following duplicate heading IDs were found: ${dupes.join(', ')}`
          }
        }),
      )
-    ).filter(Boolean)
+    ).filter(Boolean) as string[]
    expect(errors.length, errors.join('\n')).toBe(0)
  })
-  const codeqlCliPath = JSON.parse(
+  const codeqlCliPath: string = JSON.parse(
    readFileSync('src/codeql-cli/lib/config.json', 'utf-8'),
  ).targetDirectory
-  const restPath = JSON.parse(readFileSync('src/rest/lib/config.json', 'utf-8')).targetDirectory
-  const ghappsPath = JSON.parse(
+  const restPath: string = JSON.parse(
+    readFileSync('src/rest/lib/config.json', 'utf-8'),
+  ).targetDirectory
+  const ghappsPath: string = JSON.parse(
    readFileSync('src/github-apps/lib/config.json', 'utf-8'),
  ).targetDirectory
  // Right now only the rest and codeqlcli pages get their frontmatter updated automatically.
@@ -56,14 +71,14 @@ describe('autogenerated docs render', () => {
  // single pages. The apps pages are also nested inside of the rest pages. So we want to filter out only
  // rest pages and the codeql cli pages for this test.
  const filesWithAutoUpdatedVersions = autogeneratedPages.filter(
-    (page) =>
+    (page: Page) =>
      (!page.fullPath.startsWith(ghappsPath) && page.fullPath.startsWith(restPath)) ||
      page.fullPath.startsWith(codeqlCliPath),
  )
  test.each(filesWithAutoUpdatedVersions)(
    'autogenerated page $fullPath does not use feature based versioning',
-    (page) => {
+    (page: Page) => {
      expect(page.versions.feature).toBe(undefined)
    },
  )

View File

@@ -6,15 +6,36 @@ import path from 'path'
import { afterAll, beforeAll, describe, expect, test } from 'vitest'
import { mkdirp } from 'mkdirp'
import matter from 'gray-matter'
+import type { FrontmatterVersions } from '#src/types.js'
import { updateContentDirectory } from '../lib/update-markdown.js'
-const versions = {
+// Type definitions
+type ContentItem = {
+  data: {
+    title: string
+    versions: FrontmatterVersions
+    autogenerated: string
+  }
+  content: string
+}
+type NewContentData = {
+  [key: string]: ContentItem
+}
+type IndexOrder = {
+  [key: string]: {
+    startsWith: string[]
+  }
+}
+const versions: FrontmatterVersions = {
  fpt: '*',
  ghec: '*',
  ghes: '*',
}
-const newContentData = {
+const newContentData: NewContentData = {
  'actions/secrets.md': {
    data: {
      title: 'Secrets',
@@ -41,10 +62,10 @@ const newContentData = {
  },
}
-const tempDirectory = `${tmpdir()}/update-content-directory-test`
-const tempContentDirectory = `${tempDirectory}/content`
-const targetDirectory = path.join(tempContentDirectory, 'rest')
-const indexOrder = {
+const tempDirectory: string = `${tmpdir()}/update-content-directory-test`
+const tempContentDirectory: string = `${tempDirectory}/content`
+const targetDirectory: string = path.join(tempContentDirectory, 'rest')
+const indexOrder: IndexOrder = {
  'content/rest/index.md': {
    startsWith: ['overview', 'guides'],
  },
@@ -66,9 +87,9 @@ describe('automated content directory updates', () => {
    // because outside of testing it only runs in the docs-internal repo.
    // Because of that, we need to update the content paths to use the
    // full file path.
-    const contentDataFullPath = {}
+    const contentDataFullPath: { [key: string]: ContentItem } = {}
    Object.keys(newContentData).forEach(
-      (key) => (contentDataFullPath[path.join(targetDirectory, key)] = newContentData[key]),
+      (key: string) => (contentDataFullPath[path.join(targetDirectory, key)] = newContentData[key]),
    )
    // Rewrites the content directory in the operating system's