
Convert 27 JavaScript files to TypeScript (#57693)

This commit is contained in:
Kevin Heis
2025-09-30 13:44:12 -07:00
committed by GitHub
parent 647e0dca2a
commit 501e2512d7
31 changed files with 374 additions and 176 deletions

View File

@@ -2,7 +2,29 @@ import fs from 'fs'
import path from 'path'
import frontmatter from '@gr2m/gray-matter'
import { getLogLevelNumber } from './src/observability/logger/lib/log-levels.js'
// Hardcoded log level function since next.config.js cannot import from TypeScript files
// Matches ./src/observability/logger/lib/log-levels
function getLogLevelNumber() {
const LOG_LEVELS = {
error: 0,
warn: 1,
info: 2,
debug: 3,
}
let defaultLogLevel = 'info'
if (
!process.env.LOG_LEVEL &&
(process.env.NODE_ENV === 'production' || process.env.NODE_ENV === 'test')
) {
defaultLogLevel = 'debug'
}
const envLogLevel = process.env.LOG_LEVEL?.toLowerCase() || defaultLogLevel
const logLevel = LOG_LEVELS[envLogLevel] !== undefined ? envLogLevel : defaultLogLevel
return LOG_LEVELS[logLevel]
}
// Replace imports with hardcoded values
const ROOT = process.env.ROOT || '.'

View File

@@ -1,41 +0,0 @@
// This function takes an array of AJV errors and formats them
// in a way that is more compatible with Markdownlint errors.
export function formatAjvErrors(errors = []) {
return errors.map((errorObj) => {
const error = {}
// An instancePath is either blank or starts with a slash
// and separates object properties with slashes. A more
// common way to read object nesting is using dot notation.
error.instancePath =
errorObj.instancePath === ''
? errorObj.instancePath
: errorObj.instancePath.slice(1).replace('/', '.')
if (errorObj.keyword === 'additionalProperties') {
error.detail = 'The frontmatter includes an unsupported property.'
const pathContext = error.instancePath ? ` from \`${error.instancePath}\`` : ''
error.context = `Remove the property \`${errorObj.params.additionalProperty}\`${pathContext}.`
error.errorProperty = errorObj.params.additionalProperty
error.searchProperty = error.errorProperty
return error
}
if (errorObj.keyword === 'required') {
error.detail = 'The frontmatter has a missing required property'
const pathContext = error.instancePath ? ` from \`${error.instancePath}\`` : ''
error.context = `Add the missing property \`${errorObj.params.missingProperty}\`${pathContext}`
error.errorProperty = errorObj.params.missingProperty
error.searchProperty = error.instancePath.split('.').pop()
return error
}
// The two most common errors are required and additionalProperties.
// This catches any other with a generic detail that uses the AJV wording.
error.detail = `Frontmatter ${errorObj.message}.`
error.context = Object.values(errorObj.params).join('')
error.errorProperty = error.context
error.searchProperty = error.errorProperty
return error
})
}

View File

@@ -0,0 +1,79 @@
import { getFrontmatter } from './utils'
// AJV validation error object structure
interface AjvValidationError {
instancePath: string
keyword: string
message: string
params: {
additionalProperty?: string
missingProperty?: string
[key: string]: unknown
}
}
// Processed error object for markdown linting
interface ProcessedValidationError {
instancePath: string
detail: string
context: string
errorProperty: string
searchProperty: string
}
export function formatAjvErrors(errors: AjvValidationError[] = []): ProcessedValidationError[] {
const processedErrors: ProcessedValidationError[] = []
errors.forEach((errorObj: AjvValidationError) => {
const error: Partial<ProcessedValidationError> = {}
error.instancePath =
errorObj.instancePath === ''
? errorObj.instancePath
: errorObj.instancePath.slice(1).replace('/', '.')
if (errorObj.keyword === 'additionalProperties') {
error.detail = 'The frontmatter includes an unsupported property.'
const pathContext = error.instancePath ? ` from \`${error.instancePath}\`` : ''
error.context = `Remove the property \`${errorObj.params.additionalProperty}\`${pathContext}.`
error.errorProperty = errorObj.params.additionalProperty
error.searchProperty = error.errorProperty
}
// required rule
if (errorObj.keyword === 'required') {
error.detail = 'The frontmatter has a missing required property'
const pathContext = error.instancePath ? ` from \`${error.instancePath}\`` : ''
error.context = `Add the missing property \`${errorObj.params.missingProperty}\`${pathContext}`
error.errorProperty = errorObj.params.missingProperty
error.searchProperty = error.instancePath.split('.').pop()
}
// all other rules
if (!error.detail) {
error.detail = `Frontmatter ${errorObj.message}.`
error.context = Object.values(errorObj.params).join('')
error.errorProperty = error.context
error.searchProperty = error.errorProperty
}
processedErrors.push(error as ProcessedValidationError)
})
return processedErrors
}
// Alias for backward compatibility
export const processSchemaValidationErrors = formatAjvErrors
// Schema validator interface - generic due to different schema types (AJV, JSON Schema, etc.)
interface SchemaValidator {
validate(data: unknown): boolean
}
export function getSchemaValidator(
frontmatterLines: string[],
): (schema: SchemaValidator) => boolean {
const frontmatter = getFrontmatter(frontmatterLines)
return (schema: SchemaValidator) => schema.validate(frontmatter)
}
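
A minimal usage sketch of the typed helper above (the AJV error object is made up to illustrate the AjvValidationError shape; it calls formatAjvErrors as defined in this file):

const errors = formatAjvErrors([
  {
    instancePath: '/versions',
    keyword: 'additionalProperties',
    message: 'must NOT have additional properties',
    params: { additionalProperty: 'fooBar' },
  },
])
// errors[0].detail  -> 'The frontmatter includes an unsupported property.'
// errors[0].context -> 'Remove the property `fooBar` from `versions`.'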

View File

@@ -1,4 +1,6 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import type { RuleParams, RuleErrorCallback } from '../../types'
import { getFrontmatter } from '../helpers/utils'
@@ -7,7 +9,7 @@ export const frontmatterHiddenDocs = {
description:
'Articles with frontmatter property `hidden` can only be located in specific products',
tags: ['frontmatter', 'feature', 'early-access'],
function: (params, onError) => {
function: (params: RuleParams, onError: RuleErrorCallback) => {
const fm = getFrontmatter(params.lines)
if (!fm || !fm.hidden) return
@@ -24,7 +26,8 @@ export const frontmatterHiddenDocs = {
if (allowedProductPaths.some((allowedPath) => params.name.includes(allowedPath))) return
const hiddenLine = params.lines.find((line) => line.startsWith('hidden:'))
const hiddenLine = params.lines.find((line: string) => line.startsWith('hidden:'))
if (!hiddenLine) return
const lineNumber = params.lines.indexOf(hiddenLine) + 1
addError(

View File

@@ -1,16 +1,18 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError, ellipsify } from 'markdownlint-rule-helpers'
import type { RuleParams, RuleErrorCallback } from '../../types'
import { getRange } from '../helpers/utils'
/*
This rule currently only checks for one hardcoded string but
can be generalized in the future to check for strings that
can be generalized in the future to check for strings that
have data reusables.
*/
export const githubOwnedActionReferences = {
names: ['GHD013', 'github-owned-action-references'],
description: 'GitHub-owned action references should not be hardcoded',
tags: ['feature', 'actions'],
function: (params, onError) => {
function: (params: RuleParams, onError: RuleErrorCallback) => {
const filepath = params.name
if (filepath.startsWith('data/reusables/actions/action-')) return

View File

@@ -1,11 +1,13 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError, ellipsify } from 'markdownlint-rule-helpers'
import type { RuleParams, RuleErrorCallback } from '../../types'
import { getRange } from '../helpers/utils'
import frontmatter from '@/frame/lib/read-frontmatter'
/*
This rule currently only checks for one hardcoded string but
can be generalized in the future to check for strings that
can be generalized in the future to check for strings that
have data variables.
*/
export const hardcodedDataVariable = {
@@ -13,7 +15,7 @@ export const hardcodedDataVariable = {
description:
'Strings that contain "personal access token" should use the product variable instead',
tags: ['single-source'],
function: (params, onError) => {
function: (params: RuleParams, onError: RuleErrorCallback) => {
if (params.name.startsWith('data/variables/product.yml')) return
const frontmatterString = params.frontMatterLines.join('\n')
const fm = frontmatter(frontmatterString).data

View File

@@ -5,14 +5,15 @@ import {
isStringQuoted,
isStringPunctuated,
} from '../helpers/utils'
import type { RuleParams, RuleErrorCallback } from '../../types'
export const imageAltTextEndPunctuation = {
names: ['GHD032', 'image-alt-text-end-punctuation'],
description: 'Alternate text for images should end with punctuation',
tags: ['accessibility', 'images'],
parser: 'markdownit',
function: (params, onError) => {
forEachInlineChild(params, 'image', function forToken(token) {
function: (params: RuleParams, onError: RuleErrorCallback) => {
forEachInlineChild(params, 'image', function forToken(token: any) {
const imageAltText = token.content.trim()
// If the alt text is empty, there is nothing to check and you can't

View File

@@ -1,17 +1,26 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import type { RuleParams, RuleErrorCallback } from '../../types'
import { liquid } from '@/content-render/index'
import { allVersions } from '@/versions/lib/all-versions'
import { forEachInlineChild, getRange } from '../helpers/utils'
interface ImageToken {
content: string
lineNumber: number
line: string
range: [number, number]
}
export const incorrectAltTextLength = {
names: ['GHD033', 'incorrect-alt-text-length'],
description: 'Images alternate text should be between 40-150 characters',
tags: ['accessibility', 'images'],
parser: 'markdownit',
asynchronous: true,
function: (params, onError) => {
forEachInlineChild(params, 'image', async function forToken(token) {
function: (params: RuleParams, onError: RuleErrorCallback) => {
forEachInlineChild(params, 'image', async function forToken(token: ImageToken) {
let renderedString = token.content
if (token.content.includes('{%') || token.content.includes('{{')) {

View File

@@ -1,4 +1,6 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError, filterTokens } from 'markdownlint-rule-helpers'
import type { RuleParams, RuleErrorCallback } from '../../types'
import { doesStringEndWithPeriod, getRange, isStringQuoted } from '../helpers/utils'
@@ -7,8 +9,8 @@ export const linkPunctuation = {
description: 'Internal link titles must not contain punctuation',
tags: ['links', 'url'],
parser: 'markdownit',
function: (params, onError) => {
filterTokens(params, 'inline', (token) => {
function: (params: RuleParams, onError: RuleErrorCallback) => {
filterTokens(params, 'inline', (token: any) => {
const { children, line } = token
let inLink = false
for (const child of children) {

View File

@@ -18,7 +18,8 @@ describe('lint learning tracks', () => {
if (yamlFileList.length < 1) return
describe.each(yamlFileList)('%s', (yamlAbsPath) => {
let yamlContent
// Using any type because YAML content structure is dynamic and varies per file
let yamlContent: any
beforeAll(async () => {
const fileContents = await readFile(yamlAbsPath, 'utf8')
@@ -26,8 +27,10 @@ describe('lint learning tracks', () => {
})
test('contains valid liquid', () => {
const toLint = []
Object.values(yamlContent).forEach(({ title, description }) => {
// Using any[] for toLint since it contains mixed string content from various YAML properties
const toLint: any[] = []
// Using any for destructured params as YAML structure varies across different learning track files
Object.values(yamlContent).forEach(({ title, description }: any) => {
toLint.push(title)
toLint.push(description)
})

View File

@@ -2,6 +2,7 @@ import { describe, expect, test } from 'vitest'
import { runRule } from '../../lib/init-test'
import { incorrectAltTextLength } from '../../lib/linting-rules/image-alt-text-length'
import type { Rule } from '../../types'
describe(incorrectAltTextLength.names.join(' - '), () => {
test('image with incorrect alt text length fails', async () => {
@@ -9,7 +10,7 @@ describe(incorrectAltTextLength.names.join(' - '), () => {
`![${'x'.repeat(39)}](./image.png)`,
`![${'x'.repeat(151)}](./image.png)`,
].join('\n')
const result = await runRule(incorrectAltTextLength, { strings: { markdown } })
const result = await runRule(incorrectAltTextLength as Rule, { strings: { markdown } })
const errors = result.markdown
expect(errors.length).toBe(2)
expect(errors[0].lineNumber).toBe(1)
@@ -22,7 +23,7 @@ describe(incorrectAltTextLength.names.join(' - '), () => {
`![${'x'.repeat(40)}](./image.png)`,
`![${'x'.repeat(150)}](./image.png)`,
].join('\n')
const result = await runRule(incorrectAltTextLength, { strings: { markdown } })
const result = await runRule(incorrectAltTextLength as Rule, { strings: { markdown } })
const errors = result.markdown
expect(errors.length).toBe(0)
})
@@ -33,7 +34,7 @@ describe(incorrectAltTextLength.names.join(' - '), () => {
// Completely empty
'![](/images/this-is-ok.png)',
].join('\n')
const result = await runRule(incorrectAltTextLength, { strings: { markdown } })
const result = await runRule(incorrectAltTextLength as Rule, { strings: { markdown } })
const errors = result.markdown
expect(errors.length).toBe(1)
expect(errors[0].lineNumber).toBe(3)

View File

@@ -2,6 +2,7 @@ import { describe, expect, test } from 'vitest'
import { runRule } from '../../lib/init-test'
import { internalLinksNoLang } from '../../lib/linting-rules/internal-links-no-lang'
import type { Rule } from '../../types'
describe(internalLinksNoLang.names.join(' - '), () => {
test('internal links with hardcoded language codes fail', async () => {
@@ -10,7 +11,7 @@ describe(internalLinksNoLang.names.join(' - '), () => {
'[Link to just a landing page in english](/en)',
'[Korean Docs](/ko/actions)',
].join('\n')
const result = await runRule(internalLinksNoLang, { strings: { markdown } })
const result = await runRule(internalLinksNoLang as Rule, { strings: { markdown } })
const errors = result.markdown
expect(errors.length).toBe(3)
expect(errors.map((error) => error.lineNumber)).toEqual([1, 2, 3])
@@ -31,7 +32,7 @@ describe(internalLinksNoLang.names.join(' - '), () => {
// A link that starts with a language code
'[Enterprise](/enterprise/overview)',
].join('\n')
const result = await runRule(internalLinksNoLang, { strings: { markdown } })
const result = await runRule(internalLinksNoLang as Rule, { strings: { markdown } })
const errors = result.markdown
expect(errors.length).toBe(0)
})

View File

@@ -2,6 +2,7 @@ import { describe, expect, test } from 'vitest'
import { runRule } from '../../lib/init-test'
import { internalLinksOldVersion } from '../../lib/linting-rules/internal-links-old-version'
import type { Rule } from '../../types'
describe(internalLinksOldVersion.names.join(' - '), () => {
test('links with old hardcoded versioning fail', async () => {
@@ -10,7 +11,7 @@ describe(internalLinksOldVersion.names.join(' - '), () => {
'[Link to Enterprise 11.10.340](https://docs.github.com/enterprise/11.10.340/admin/yes)',
'[Enterprise 2.8](http://help.github.com/enterprise/2.8/admin/)',
].join('\n')
const result = await runRule(internalLinksOldVersion, { strings: { markdown } })
const result = await runRule(internalLinksOldVersion as Rule, { strings: { markdown } })
const errors = result.markdown
expect(errors.length).toBe(3)
expect(errors.map((error) => error.lineNumber)).toEqual([1, 2, 3])
@@ -26,7 +27,7 @@ describe(internalLinksOldVersion.names.join(' - '), () => {
// Current versioning links is excluded from this test
'[New versioning](/github/site-policy/enterprise/2.2/yes)',
].join('\n')
const result = await runRule(internalLinksOldVersion, { strings: { markdown } })
const result = await runRule(internalLinksOldVersion as Rule, { strings: { markdown } })
const errors = result.markdown
expect(errors.length).toBe(0)
})

View File

@@ -2,6 +2,7 @@ import { describe, expect, test } from 'vitest'
import { runRule } from '../../lib/init-test'
import { linkQuotation } from '../../lib/linting-rules/link-quotation'
import type { Rule } from '../../types'
describe(linkQuotation.names.join(' - '), () => {
test('links that are formatted correctly should not generate an error', async () => {
@@ -9,7 +10,7 @@ describe(linkQuotation.names.join(' - '), () => {
'Random stuff [A title](./image.png)',
'"This is a direct quote" [A title](./image.png)',
].join('\n')
const result = await runRule(linkQuotation, { strings: { markdown } })
const result = await runRule(linkQuotation as Rule, { strings: { markdown } })
const errors = result.markdown
expect(errors.length).toBe(0)
})
@@ -26,11 +27,11 @@ describe(linkQuotation.names.join(' - '), () => {
'See "[AUTOTITLE](/foo/bar)," "[AUTOTITLE](/foo/bar2)," "[AUTOTITLE](/foo/bar3)," and "[AUTOTITLE](/foo/bar4)."',
'See "[Anchor link](#anchor-link)."',
].join('\n')
const result = await runRule(linkQuotation, { strings: { markdown } })
const result = await runRule(linkQuotation as Rule, { strings: { markdown } })
const errors = result.markdown
expect(errors.length).toBe(13)
expect(errors[0].errorRange).toEqual([14, 25])
expect(errors[0].fixInfo.insertText).toBe('[A title](./image.png).')
expect(errors[1].fixInfo.insertText).toBe('[A title](./image.png)?')
expect(errors[0].fixInfo?.insertText).toBe('[A title](./image.png).')
expect(errors[1].fixInfo?.insertText).toBe('[A title](./image.png)?')
})
})

View File

@@ -1,36 +0,0 @@
export const tags = {
note: 'accent',
tip: 'success',
warning: 'attention',
danger: 'danger',
}
const template =
'<div class="ghd-alert ghd-alert-{{ color }} ghd-spotlight-{{ color }}">{{ output }}</div>'
export const Spotlight = {
type: 'block',
parse(tagToken, remainTokens) {
this.tagName = tagToken.name
this.templates = []
const stream = this.liquid.parser.parseStream(remainTokens)
stream
.on(`tag:end${this.tagName}`, () => stream.stop())
.on('template', (tpl) => this.templates.push(tpl))
.on('end', () => {
throw new Error(`tag ${tagToken.getText()} not closed`)
})
stream.start()
},
render: function* (scope) {
const output = yield this.liquid.renderer.renderTemplates(this.templates, scope)
return yield this.liquid.parseAndRender(template, {
color: tags[this.tagName],
output,
})
},
}

View File

@@ -0,0 +1,64 @@
interface LiquidToken {
name: string
getText(): string
}
interface LiquidTemplate {
[key: string]: unknown
}
interface LiquidStream {
on(event: string, callback: () => void): LiquidStream
stop(): void
start(): void
}
interface LiquidEngine {
parser: {
parseStream(tokens: LiquidToken[]): LiquidStream
}
renderer: {
renderTemplates(templates: LiquidTemplate[], scope: Record<string, unknown>): string
}
parseAndRender(template: string, context: Record<string, string>): string
}
export const tags: Record<string, string> = {
note: 'accent',
tip: 'success',
warning: 'attention',
danger: 'danger',
}
const template: string =
'<div class="ghd-alert ghd-alert-{{ color }} ghd-spotlight-{{ color }}">{{ output }}</div>'
export const Spotlight = {
type: 'block' as const,
tagName: '' as string,
templates: [] as LiquidTemplate[],
liquid: null as LiquidEngine | null,
parse(tagToken: LiquidToken, remainTokens: LiquidToken[]): void {
this.tagName = tagToken.name
this.templates = []
const stream = this.liquid!.parser.parseStream(remainTokens)
stream
.on(`tag:end${this.tagName}`, () => stream.stop())
.on('template', (tpl: LiquidTemplate) => this.templates.push(tpl))
.on('end', () => {
throw new Error(`tag ${tagToken.getText()} not closed`)
})
stream.start()
},
render: function* (scope: Record<string, unknown>): Generator<unknown, unknown, unknown> {
const output = yield this.liquid!.renderer.renderTemplates(this.templates, scope)
return yield this.liquid!.parseAndRender(template, {
color: tags[this.tagName],
output,
})
},
}
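
For orientation, a hedged sketch of registering a block tag object like Spotlight with a liquidjs engine (the tag name and input are illustrative; the real registration lives elsewhere in the docs codebase):

import { Liquid } from 'liquidjs'

const engine = new Liquid()
// liquidjs accepts tag implementations as plain objects with parse/render;
// the cast works around the nullable `liquid` field in the Spotlight type above.
engine.registerTag('note', Spotlight as any)

engine
  .parseAndRender('{% note %}Heads up!{% endnote %}')
  .then((html) => console.log(html)) // output wrapped in the ghd-alert template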

View File

@@ -1,35 +0,0 @@
import { visitParents } from 'unist-util-visit-parents'
/**
* Where it can mutate the AST to swap from:
*
* <thead>
* <tr>
* <th>...</th>
* <th>...</th>
*
* to:
* <thead>
* <tr>
* <th scope="col">...</th>
* <th scope="col">...</th>
*
* */
function matcher(node) {
return node.type === 'element' && node.tagName === 'th' && !('scope' in node.properties)
}
function visitor(node, ancestors) {
const parent = ancestors.at(-1)
if (parent && parent.tagName === 'tr') {
const grandParent = ancestors.at(-2)
if (grandParent && grandParent.tagName === 'thead') {
node.properties.scope = 'col'
}
}
}
export default function rewriteTheadThScope() {
return (tree) => visitParents(tree, matcher, visitor)
}

View File

@@ -0,0 +1,42 @@
import { visitParents } from 'unist-util-visit-parents'
import type { Root } from 'hast'
import type { Transformer } from 'unified'
/**
* Where it can mutate the AST to swap from:
*
* <thead>
* <tr>
* <th>...</th>
* <th>...</th>
*
* to:
* <thead>
* <tr>
* <th scope="col">...</th>
* <th scope="col">...</th>
*
* */
function matcher(node: any): boolean {
// Using any type due to complex type conflicts between different versions of
// @types/hast and @types/unist used by various dependencies. The node should be
// an Element with tagName 'th' and no existing 'scope' property.
return node.type === 'element' && node.tagName === 'th' && !('scope' in node.properties)
}
function visitor(node: any, ancestors: any[]): void {
// Using any type for the same reason as matcher - complex type conflicts between
// hast/unist type definitions across different package versions
const parent = ancestors.at(-1)
if (parent && parent.tagName === 'tr') {
const grandParent = ancestors.at(-2)
if (grandParent && grandParent.tagName === 'thead') {
node.properties.scope = 'col'
}
}
}
export default function rewriteTheadThScope(): Transformer<Root> {
return (tree: Root) => visitParents(tree, matcher, visitor)
}
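
For context, a minimal sketch of running the plugin in a unified/rehype pipeline (package choices here are illustrative, not necessarily what the docs build uses):

import { unified } from 'unified'
import rehypeParse from 'rehype-parse'
import rehypeStringify from 'rehype-stringify'

unified()
  .use(rehypeParse, { fragment: true })
  .use(rewriteTheadThScope)
  .use(rehypeStringify)
  .process('<table><thead><tr><th>Name</th></tr></thead><tbody></tbody></table>')
  .then((file) => console.log(String(file))) // <th> inside <thead> now has scope="col"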

View File

@@ -2,7 +2,7 @@ import { schema } from '@/frame/lib/frontmatter'
// Some learning tracks have `versions` blocks that match `versions` frontmatter,
// so we can import that part of the FM schema.
const versionsProps = Object.assign({}, schema.properties.versions)
const versionsProps = Object.assign({}, (schema.properties as any).versions)
// `versions` are not required in learning tracks the way they are in FM.
delete versionsProps.required

View File

@@ -1,4 +1,15 @@
import { productMap } from '@/products/lib/all-products'
interface TocItem {
type: 'category' | 'subcategory' | 'article'
href: string
}
interface Page {
relativePath: string
markdown: string
}
const productTOCs = Object.values(productMap)
.filter((product) => !product.external)
.map((product) => product.toc.replace('content/', ''))
@@ -7,7 +18,7 @@ const linkString = /{% [^}]*?link.*? \/(.*?) ?%}/m
const linksArray = new RegExp(linkString.source, 'gm')
// return an array of objects like { type: 'category|subcategory|article', href: 'path' }
export default function getTocItems(page) {
export default function getTocItems(page: Page): TocItem[] | undefined {
// only process product and category tocs
if (!page.relativePath.endsWith('index.md')) return
if (page.relativePath === 'index.md') return
@@ -23,19 +34,24 @@ export default function getTocItems(page) {
return []
}
return rawItems.map((item) => {
const tocItem = {}
return rawItems
.map((item: string) => {
const match = item.match(linkString)
if (!match) return null
// a product's toc items are always categories
// whereas a category's toc items can be either subcategories or articles
tocItem.type = productTOCs.includes(page.relativePath)
? 'category'
: item.includes('topic_')
? 'subcategory'
: 'article'
const tocItem: TocItem = {} as TocItem
tocItem.href = item.match(linkString)[1]
// a product's toc items are always categories
// whereas a category's toc items can be either subcategories or articles
tocItem.type = productTOCs.includes(page.relativePath)
? 'category'
: page.relativePath.includes('/index.md')
? 'subcategory'
: 'article'
return tocItem
})
tocItem.href = match[1]
return tocItem
})
.filter((item): item is TocItem => item !== null)
}

View File

@@ -5,26 +5,35 @@ import walk from 'walk-sync'
import createTree from '@/frame/lib/create-tree'
interface Page {
relativePath: string
}
interface TreeNode {
page: Page
childPages?: TreeNode[]
}
describe('content files', () => {
test.each(['content', 'src/fixtures/fixtures/content'])(
'no content files left orphaned without being in the tree in %s',
async (contentDir) => {
async (contentDir: string) => {
const tree = await createTree(contentDir)
const traverse = (node) => {
const traverse = (node: TreeNode): string[] => {
const relativeFiles = [node.page.relativePath]
for (const child of node.childPages || []) {
relativeFiles.push(...traverse(child))
}
return relativeFiles
}
const relativeFiles = traverse(tree).map((p) => path.join(contentDir, p))
const relativeFiles = tree ? traverse(tree).map((p: string) => path.join(contentDir, p)) : []
const contentFiles = walk(contentDir, { includeBasePath: true, directories: false }).filter(
(file) => {
(file: string) => {
return file.endsWith('.md') && !file.includes('README')
},
)
const orphanedFiles = contentFiles.filter((file) => !relativeFiles.includes(file))
) as string[]
const orphanedFiles = contentFiles.filter((file: string) => !relativeFiles.includes(file))
expect(
orphanedFiles.length,
`${orphanedFiles} orphaned files found on disk but not in site tree`,

View File

@@ -18,7 +18,10 @@ describe('find page', () => {
languageCode: 'en',
})
const englishPermalink = page.permalinks[0].href
const englishPermalink = page?.permalinks[0].href
if (!page || !englishPermalink) {
throw new Error('Page or permalink not found')
}
const redirectToFind = '/some-old-path'
// add named keys
@@ -26,7 +29,12 @@ describe('find page', () => {
[englishPermalink]: page,
}
const redirectedPage = findPage(redirectToFind, pageMap, page.buildRedirects())
expect(typeof redirectedPage.title).toBe('string')
const redirectedPage = findPage(
redirectToFind,
pageMap as any, // Using any due to type conflicts between different Page type definitions
page.buildRedirects(),
)
expect(redirectedPage).toBeDefined()
expect(typeof redirectedPage?.title).toBe('string')
})
})

View File

@@ -1,11 +1,27 @@
import { sentenceCase } from 'change-case'
import GithubSlugger from 'github-slugger'
interface RawPreview {
title: string
toggled_on: string[]
toggled_by: string
announcement?: unknown
updates?: unknown
}
interface ProcessedPreview extends Omit<RawPreview, 'announcement' | 'updates'> {
accept_header: string
href: string
}
const slugger = new GithubSlugger()
const inputOrPayload = /(Input|Payload)$/m
export default function processPreviews(previews) {
export default function processPreviews(previews: RawPreview[]): ProcessedPreview[] {
// clean up raw yml data
previews.forEach((preview) => {
// Using any type because we're mutating the preview object to add new properties
// that don't exist in the RawPreview interface (accept_header, href)
previews.forEach((preview: any) => {
preview.title = sentenceCase(preview.title)
.replace(/ -.+/, '') // remove any extra info that follows a hyphen
.replace('it hub', 'itHub') // fix overcorrected `git hub` from sentenceCasing
@@ -16,7 +32,7 @@ export default function processPreviews(previews) {
// filter out schema members that end in `Input` or `Payload`
preview.toggled_on = preview.toggled_on.filter(
(schemaMember) => !inputOrPayload.test(schemaMember),
(schemaMember: string) => !inputOrPayload.test(schemaMember),
)
// remove unnecessary leading colon
@@ -32,5 +48,5 @@ export default function processPreviews(previews) {
preview.href = `/graphql/overview/schema-previews#${slugger.slug(preview.title)}`
})
return previews
return previews as ProcessedPreview[]
}
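
A brief sketch of calling the typed function (the preview data is invented; real previews come from the GraphQL schema configuration):

const processed = processPreviews([
  {
    title: 'Checks - Preview',
    toggled_on: ['CheckRun', 'CheckRunInput', 'CheckRunPayload'],
    toggled_by: ':antiope-preview',
  },
])
// The title is sentence-cased, Input/Payload schema members are filtered out of
// toggled_on, and each preview gains accept_header and href properties.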

View File

@@ -12,6 +12,13 @@ export const LOG_LEVELS = {
warn: 1,
info: 2,
debug: 3,
} as const
type LogLevel = keyof typeof LOG_LEVELS
type LogLevelValue = (typeof LOG_LEVELS)[LogLevel]
function isValidLogLevel(level: string): level is LogLevel {
return level in LOG_LEVELS
}
// We set the log level based on the LOG_LEVEL environment variable
@@ -19,19 +26,22 @@ export const LOG_LEVELS = {
// - 'info' in development
// - 'debug' in production
// - 'debug' in test - this is because `vitest` turns off logs unless --silent=false is passed
export function getLogLevelNumber() {
let defaultLogLevel = 'info'
export function getLogLevelNumber(): LogLevelValue {
let defaultLogLevel: LogLevel = 'info'
if (
!process.env.LOG_LEVEL &&
(process.env.NODE_ENV === 'production' || process.env.NODE_ENV === 'test')
) {
defaultLogLevel = 'debug'
}
const logLevel = process.env.LOG_LEVEL?.toLowerCase() || defaultLogLevel
const envLogLevel = process.env.LOG_LEVEL?.toLowerCase() || defaultLogLevel
const logLevel = isValidLogLevel(envLogLevel) ? envLogLevel : defaultLogLevel
return LOG_LEVELS[logLevel]
}
export const useProductionLogging = () => {
export const useProductionLogging = (): boolean => {
return (
(process.env.NODE_ENV === 'production' && !process.env.CI) ||
process.env.LOG_LIKE_PRODUCTION === 'true'
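
A quick sketch of the new fallback behaviour (values follow the LOG_LEVELS map above):

process.env.LOG_LEVEL = 'warn'
getLogLevelNumber() // 1

process.env.LOG_LEVEL = 'bogus'
getLogLevelNumber() // not a valid level, so the default level's number is returned instead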

View File

@@ -1,10 +1,26 @@
import { readFile, writeFile } from 'fs/promises'
const STATIC_REDIRECTS = 'src/rest/data/client-side-rest-api-redirects.json'
const REST_API_OVERRIDES = 'src/rest/lib/rest-api-overrides.json'
interface OperationUrl {
originalUrl: string
category: string
subcategory?: string
}
interface RestApiOverrides {
operationUrls: Record<string, OperationUrl>
sectionUrls: Record<string, string>
}
interface RedirectMap {
[oldUrl: string]: string
}
// This is a way to add redirects from one fragment to another from the
// client's browser.
export async function syncRestRedirects() {
export async function syncRestRedirects(): Promise<void> {
const clientSideRedirects = await getClientSideRedirects()
await writeFile(STATIC_REDIRECTS, JSON.stringify(clientSideRedirects, null, 2), 'utf8')
@@ -13,11 +29,13 @@ export async function syncRestRedirects() {
// Reads in src/rest/lib/rest-api-overrides.json and generates the
// redirect file src/rest/data/client-side-rest-api-redirects.json
async function getClientSideRedirects() {
const { operationUrls, sectionUrls } = JSON.parse(await readFile(REST_API_OVERRIDES, 'utf8'))
async function getClientSideRedirects(): Promise<RedirectMap> {
const { operationUrls, sectionUrls }: RestApiOverrides = JSON.parse(
await readFile(REST_API_OVERRIDES, 'utf8'),
)
const operationRedirects = {}
Object.values(operationUrls).forEach((value) => {
const operationRedirects: RedirectMap = {}
Object.values(operationUrls).forEach((value: OperationUrl) => {
const oldUrl = value.originalUrl.replace('/rest/reference', '/rest')
const anchor = oldUrl.split('#')[1]
const subcategory = value.subcategory
@@ -26,7 +44,7 @@ async function getClientSideRedirects() {
: `/rest/${value.category}#${anchor}`
operationRedirects[oldUrl] = redirectTo
})
const redirects = {
const redirects: RedirectMap = {
...operationRedirects,
...sectionUrls,
}