1
0
mirror of synced 2025-12-19 09:57:42 -05:00

Enable @typescript-eslint/ban-ts-comment and fix all type errors (#58296)

This commit is contained in:
Kevin Heis
2025-11-04 14:19:22 -08:00
committed by GitHub
parent 890353b583
commit 3755f4c920
75 changed files with 640 additions and 238 deletions

View File

@@ -96,7 +96,6 @@ export default [
camelcase: 'off', // Many gh apis use underscores, 600+ uses
// Disabled rules to review
'@typescript-eslint/ban-ts-comment': 'off', // 50+
'github/array-foreach': 'off', // 250+
'no-console': 'off', // 800+
'@typescript-eslint/no-explicit-any': 'off', // 1000+

View File

@@ -17,7 +17,6 @@ import path from 'path'
import { program } from 'commander'
import chalk from 'chalk'
import cheerio from 'cheerio'
// @ts-ignore see https://github.com/sindresorhus/file-type/issues/652
import { fileTypeFromFile } from 'file-type'
import walk from 'walk-sync'
import isSVG from 'is-svg'

View File

@@ -4,7 +4,6 @@ import { customConfig } from '@/content-linter/style/github-docs'
import type { Rule } from '@/content-linter/types'
// Import markdownlint rules - external library without TypeScript declarations
// @ts-ignore - markdownlint doesn't provide TypeScript declarations
import markdownlintRules from '../../../../node_modules/markdownlint/lib/rules'
export const customRules: Rule[] = gitHubDocsMarkdownlint.rules

View File

@@ -8,7 +8,10 @@
export function printAnnotationResults(
// Using 'any' type as results structure is dynamic and comes from various linting tools with different formats
results: any,
{ skippableRules = [], skippableFlawProperties = [] } = {},
{
skippableRules = [],
skippableFlawProperties = [],
}: { skippableRules?: string[]; skippableFlawProperties?: string[] } = {},
) {
for (const [file, flaws] of Object.entries(results)) {
// Using 'any' type for flaws as they have varying structures depending on the linting rule

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError, filterTokens } from 'markdownlint-rule-helpers'
import matter from '@gr2m/gray-matter'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError, filterTokens } from 'markdownlint-rule-helpers'
import type { RuleParams, RuleErrorCallback, MarkdownToken } from '@/content-linter/types'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError, filterTokens } from 'markdownlint-rule-helpers'
import { getFrontmatter } from '@/content-linter/lib/helpers/utils'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import Ajv from 'ajv'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import yaml from 'js-yaml'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError, newLineRe } from 'markdownlint-rule-helpers'
import type { RuleParams, RuleErrorCallback, MarkdownToken, Rule } from '@/content-linter/types'

View File

@@ -1,6 +1,5 @@
import fs from 'fs'
import path from 'path'
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { getFrontmatter } from '../helpers/utils'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import type { RuleParams, RuleErrorCallback } from '../../types'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { getFrontmatter } from '../helpers/utils'

View File

@@ -1,6 +1,5 @@
import fs from 'fs'
import path from 'path'
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { getFrontmatter } from '../helpers/utils'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { intersection } from 'lodash-es'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { getFrontmatter } from '@/content-linter/lib/helpers/utils'
import type { RuleParams, RuleErrorCallback, Rule } from '@/content-linter/types'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import path from 'path'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError, ellipsify } from 'markdownlint-rule-helpers'
import type { RuleParams, RuleErrorCallback } from '../../types'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError, ellipsify } from 'markdownlint-rule-helpers'
import type { RuleParams, RuleErrorCallback } from '../../types'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError, filterTokens } from 'markdownlint-rule-helpers'
import type { RuleParams, RuleErrorCallback, MarkdownToken } from '@/content-linter/types'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { forEachInlineChild, getRange } from '../helpers/utils'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import type { RuleParams, RuleErrorCallback } from '../../types'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { forEachInlineChild } from '@/content-linter/lib/helpers/utils'

View File

@@ -1,6 +1,4 @@
// @ts-ignore - markdownlint-rule-search-replace doesn't provide TypeScript declarations
import searchReplace from 'markdownlint-rule-search-replace'
// @ts-ignore - @github/markdownlint-github doesn't provide TypeScript declarations
import markdownlintGitHub from '@github/markdownlint-github'
import { imageAltTextEndPunctuation } from '@/content-linter/lib/linting-rules/image-alt-text-end-punctuation'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { filterTokens } from 'markdownlint-rule-helpers'
import { addFixErrorDetail, getRange } from '../helpers/utils'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError, filterTokens } from 'markdownlint-rule-helpers'
import { getRange } from '../helpers/utils'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { filterTokens } from 'markdownlint-rule-helpers'
import { addFixErrorDetail, getRange } from '../helpers/utils'

View File

@@ -1,6 +1,5 @@
import fs from 'fs'
import path from 'path'
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { getFrontmatter } from '../helpers/utils'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { getFrontmatter } from '../helpers/utils'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { getFrontmatter } from '../helpers/utils'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError, filterTokens } from 'markdownlint-rule-helpers'
import type { RuleParams, RuleErrorCallback, Rule } from '../../types'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError, filterTokens } from 'markdownlint-rule-helpers'
import { getRange, quotePrecedesLinkOpen } from '../helpers/utils'
import { escapeRegExp } from 'lodash-es'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { TokenKind } from 'liquidjs'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import {

View File

@@ -1,5 +1,4 @@
import { TokenKind } from 'liquidjs'
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { getLiquidTokens, conditionalTags, getPositionData } from '../helpers/liquid-utils'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { getFrontmatter } from '../helpers/utils'

View File

@@ -1,6 +1,5 @@
import semver from 'semver'
import { TokenKind } from 'liquidjs'
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { getRange, addFixErrorDetail } from '../helpers/utils'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError, ellipsify } from 'markdownlint-rule-helpers'
import { getRange } from '@/content-linter/lib/helpers/utils'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { TokenKind } from 'liquidjs'
import path from 'path'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import { getRange } from '../helpers/utils'
import frontmatter from '@/frame/lib/read-frontmatter'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError } from 'markdownlint-rule-helpers'
import type { RuleParams, RuleErrorCallback, Rule } from '@/content-linter/types'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError, filterTokens } from 'markdownlint-rule-helpers'
import yaml from 'js-yaml'

View File

@@ -1,4 +1,3 @@
// @ts-ignore - markdownlint-rule-helpers doesn't provide TypeScript declarations
import { addError, filterTokens } from 'markdownlint-rule-helpers'
import type { RuleParams, RuleErrorCallback, MarkdownToken } from '@/content-linter/types'

View File

@@ -1,5 +1,4 @@
import yaml from 'js-yaml'
// @ts-ignore - markdownlint-rule-helpers doesn't have TypeScript declarations
import { addError, filterTokens } from 'markdownlint-rule-helpers'
import { liquid } from '@/content-render/index'

View File

@@ -2,7 +2,6 @@
* @purpose Writer tool
* @description Run the Docs content linter, specifying paths and optional rules
*/
// @ts-nocheck
import fs from 'fs'
import path from 'path'
import { execSync } from 'child_process'
@@ -20,6 +19,70 @@ import { prettyPrintResults } from './pretty-print-results'
import { getLintableYml } from '@/content-linter/lib/helpers/get-lintable-yml'
import { printAnnotationResults } from '../lib/helpers/print-annotations'
import languages from '@/languages/lib/languages-server'
import type { Rule as MarkdownlintRule } from 'markdownlint'
import type { Rule, Config } from '@/content-linter/types'
// Type definitions for Markdownlint results
interface LintError {
lineNumber: number
ruleNames: string[]
ruleDescription: string
ruleInformation: string
errorDetail: string | null
errorContext: string | null
errorRange: [number, number] | null
fixInfo?: {
editColumn?: number
deleteCount?: number
insertText?: string
lineNumber?: number
}
isYamlFile?: boolean
}
type LintResults = Record<string, LintError[]>
interface FileList {
length: number
content: string[]
data: string[]
yml: string[]
}
interface ConfiguredRules {
content: MarkdownlintRule[]
data: MarkdownlintRule[]
frontMatter: MarkdownlintRule[]
yml: MarkdownlintRule[]
}
interface LintConfig {
content: Record<string, any> // Markdownlint config object
data: Record<string, any>
frontMatter: Record<string, any>
yml: Record<string, any>
}
interface MarkdownLintConfigResult {
config: LintConfig
configuredRules: ConfiguredRules
}
interface FormattedResult {
ruleDescription: string
ruleNames: string[]
lineNumber: number
columnNumber?: number
severity: string
errorDetail?: string
errorContext?: string
context?: string
fixable?: boolean
// Index signature allows additional properties from LintError that may vary by rule
[key: string]: any
}
type FormattedResults = Record<string, FormattedResult[]>
/**
* Config that applies to all rules in all environments (CI, reports, precommit).
@@ -99,20 +162,20 @@ async function main() {
const start = Date.now()
// Initializes the config to pass to markdownlint based on the input options
const { config, configuredRules } = getMarkdownLintConfig(errorsOnly, rules, customRules)
const { config, configuredRules } = getMarkdownLintConfig(errorsOnly, rules || [])
// Run Markdownlint for content directory
const resultContent = await markdownlint.promises.markdownlint({
const resultContent = (await markdownlint.promises.markdownlint({
files: files.content,
config: config.content,
customRules: configuredRules.content,
})
})) as LintResults
// Run Markdownlint for data directory
const resultData = await markdownlint.promises.markdownlint({
const resultData = (await markdownlint.promises.markdownlint({
files: files.data,
config: config.data,
customRules: configuredRules.data,
})
})) as LintResults
// Run Markdownlint for content directory (frontmatter only)
const resultFrontmatter = await markdownlint.promises.markdownlint({
@@ -123,20 +186,20 @@ async function main() {
})
// Run Markdownlint on "lintable" Markdown strings in a YML file
const resultYml = {}
const resultYml: LintResults = {}
for (const ymlFile of files.yml) {
const lintableYml = await getLintableYml(ymlFile)
if (!lintableYml) continue
const resultYmlFile = await markdownlint.promises.markdownlint({
const resultYmlFile = (await markdownlint.promises.markdownlint({
strings: lintableYml,
config: config.yml,
customRules: configuredRules.yml,
})
})) as LintResults
Object.entries(resultYmlFile).forEach(([key, value]) => {
if (value.length) {
const errors = value.map((error) => {
if ((value as LintError[]).length) {
const errors = (value as LintError[]).map((error) => {
// Autofixing would require us to write the changes back to the YML
// file which Markdownlint doesn't support. So we don't support
// autofixing for YML files at this time.
@@ -152,13 +215,13 @@ async function main() {
// There are no collisions when assigning the results to the new object
// because the keys are filepaths and the individual runs of Markdownlint
// are in separate directories (content and data).
const results = Object.assign({}, resultContent, resultData, resultYml)
const results: LintResults = Object.assign({}, resultContent, resultData, resultYml)
// Merge in the results for frontmatter tests, which could be
// in a file that already exists as a key in the `results` object.
Object.entries(resultFrontmatter).forEach(([key, value]) => {
if (results[key]) results[key].push(...value)
else results[key] = value
if (results[key]) results[key].push(...(value as LintError[]))
else results[key] = value as LintError[]
})
// Apply markdownlint fixes if available and rewrite the files
@@ -169,7 +232,7 @@ async function main() {
continue
}
const content = fs.readFileSync(file, 'utf8')
const applied = applyFixes(content, results[file])
const applied = applyFixes(content, results[file] as any)
if (content !== applied) {
countFixedFiles++
fs.writeFileSync(file, applied, 'utf-8')
@@ -191,13 +254,7 @@ async function main() {
reportSummaryByRule(results, config)
} else if (errorFileCount > 0 || warningFileCount > 0 || countFixedFiles > 0) {
if (outputFile) {
fs.writeFileSync(
`${outputFile}`,
JSON.stringify(formattedResults, undefined, 2),
function (err) {
if (err) throw err
},
)
fs.writeFileSync(`${outputFile}`, JSON.stringify(formattedResults, undefined, 2), 'utf-8')
console.log(`Output written to ${outputFile}`)
} else {
prettyPrintResults(formattedResults, {
@@ -214,8 +271,8 @@ async function main() {
// and columns numbers of YAML files. YAML files consist of one
// or more Markdown strings that can themselves constitute an
// entire "file."
'isYamlFile',
],
'isYamlFile' as string,
] as string[],
})
}
@@ -290,7 +347,12 @@ async function main() {
}
}
function pluralize(things, word, pluralForm = null) {
// Using unknown[] to accept arrays of any type (errors, warnings, files, etc.)
function pluralize(
things: unknown[] | number,
word: string,
pluralForm: string | null = null,
): string {
const isPlural = Array.isArray(things) ? things.length !== 1 : things !== 1
if (isPlural) {
return pluralForm || `${word}s`
@@ -306,8 +368,8 @@ function pluralize(things, word, pluralForm = null) {
// (e.g., heading linters) so we need to separate the
// list of data files from all other files to run
// through markdownlint individually
function getFilesToLint(inputPaths) {
const fileList = {
function getFilesToLint(inputPaths: string[]): FileList {
const fileList: FileList = {
length: 0,
content: [],
data: [],
@@ -347,7 +409,7 @@ function getFilesToLint(inputPaths) {
const seen = new Set()
function cleanPaths(filePaths) {
function cleanPaths(filePaths: string[]): string[] {
const clean = []
for (const filePath of filePaths) {
if (
@@ -390,21 +452,21 @@ function getFilesToLint(inputPaths) {
* isInDir('/foo', '/foo') => true
* isInDir('/foo/barring', '/foo/bar') => false
*/
function isInDir(child, parent) {
function isInDir(child: string, parent: string): boolean {
// The simple reason why you can't use `parent.startsWith(child)`
// is because the parent might be `/path/to/data` and the child
// might be `/path/to/data-files`.
const parentSplit = parent.split(path.sep)
const childSplit = child.split(path.sep)
return parentSplit.every((dir, i) => dir === childSplit[i])
return parentSplit.every((dir: string, i: number) => dir === childSplit[i])
}
// This is a function used during development to
// see how many errors we have per rule. This helps
// to identify rules that can be upgraded from
// warning severity to error.
function reportSummaryByRule(results, config) {
const ruleCount = {}
function reportSummaryByRule(results: LintResults, config: LintConfig): void {
const ruleCount: Record<string, number> = {}
// populate the list of rules with 0 occurrences
for (const rule of Object.keys(config.content)) {
@@ -431,16 +493,21 @@ function reportSummaryByRule(results, config) {
result. Results are sorted by severity per file, with errors
listed first then warnings.
*/
function getFormattedResults(allResults, isInPrecommitMode) {
const output = {}
function getFormattedResults(
allResults: LintResults,
isInPrecommitMode: boolean,
): FormattedResults {
const output: FormattedResults = {}
Object.entries(allResults)
// Each result key always has an array value, but it may be empty
.filter(([, results]) => results.length)
.forEach(([key, fileResults]) => {
if (verbose) {
output[key] = [...fileResults]
output[key] = fileResults.map((flaw: LintError) => formatResult(flaw, isInPrecommitMode))
} else {
const formattedResults = fileResults.map((flaw) => formatResult(flaw, isInPrecommitMode))
const formattedResults = fileResults.map((flaw: LintError) =>
formatResult(flaw, isInPrecommitMode),
)
// Only add the file to output if there are results after filtering
if (formattedResults.length > 0) {
@@ -458,19 +525,24 @@ function getFormattedResults(allResults, isInPrecommitMode) {
// and the value being an array of errors for that filepath.
// Each result has a rule name, which when looked up in `allConfig`
// will give us its severity and we filter those that are 'warning'.
function getWarningCountByFile(results, fixed = false) {
function getWarningCountByFile(results: FormattedResults, fixed = false): number {
return getCountBySeverity(results, 'warning', fixed)
}
// Results are formatted with the key being the filepath
// and the value being an array of results for that filepath.
// Each result in the array has a severity of error or warning.
function getErrorCountByFile(results, fixed = false) {
function getErrorCountByFile(results: FormattedResults, fixed = false): number {
return getCountBySeverity(results, 'error', fixed)
}
function getCountBySeverity(results, severityLookup, fixed) {
return Object.values(results).filter((fileResults) =>
fileResults.some((result) => {
function getCountBySeverity(
results: FormattedResults,
severityLookup: string,
fixed: boolean,
): number {
return Object.values(results).filter((fileResults: FormattedResult[]) =>
fileResults.some((result: FormattedResult) => {
// If --fix was applied, we don't want to know about files that
// no longer have errors or warnings.
return result.severity === severityLookup && (!fixed || !result.fixable)
@@ -481,19 +553,19 @@ function getCountBySeverity(results, severityLookup, fixed) {
// Removes null values and properties that are not relevant to content
// writers, adds the severity to each result object, and transforms
// some error and fix data into a more readable format.
function formatResult(object, isInPrecommitMode) {
const formattedResult = {}
function formatResult(object: LintError, isInPrecommitMode: boolean): FormattedResult {
const formattedResult: FormattedResult = {} as FormattedResult
// Add severity to each result object
const ruleName = object.ruleNames[1] || object.ruleNames[0]
if (!allConfig[ruleName]) {
const ruleConfig = allConfig[ruleName] as Config | undefined
if (!ruleConfig) {
throw new Error(`Rule not found in allConfig: '${ruleName}'`)
}
formattedResult.severity =
allConfig[ruleName].severity ||
getSearchReplaceRuleSeverity(ruleName, object, isInPrecommitMode)
ruleConfig.severity || getSearchReplaceRuleSeverity(ruleName, object, isInPrecommitMode)
formattedResult.context = allConfig[ruleName].context || ''
formattedResult.context = ruleConfig.context || ''
return Object.entries(object).reduce((acc, [key, value]) => {
if (key === 'fixInfo') {
@@ -503,7 +575,7 @@ function formatResult(object, isInPrecommitMode) {
}
if (!value) return acc
if (key === 'errorRange') {
acc.columnNumber = value[0]
acc.columnNumber = (value as [number, number])[0]
delete acc.range
return acc
}
@@ -545,14 +617,17 @@ function listRules() {
Rules that can't be run on partials have the property
`partial-markdown-files` set to false.
*/
function getMarkdownLintConfig(filterErrorsOnly, runRules) {
function getMarkdownLintConfig(
filterErrorsOnly: boolean,
runRules: string[],
): MarkdownLintConfigResult {
const config = {
content: structuredClone(defaultConfig),
data: structuredClone(defaultConfig),
frontMatter: structuredClone(defaultConfig),
yml: structuredClone(defaultConfig),
}
const configuredRules = {
const configuredRules: ConfiguredRules = {
content: [],
data: [],
frontMatter: [],
@@ -560,12 +635,12 @@ function getMarkdownLintConfig(filterErrorsOnly, runRules) {
}
for (const [ruleName, ruleConfig] of Object.entries(allConfig)) {
const customRule = customConfig[ruleName] && getCustomRule(ruleName)
const customRule = (customConfig as any)[ruleName] && getCustomRule(ruleName)
// search-replace is handled differently than other rules because
// it has nested metadata and rules.
if (
filterErrorsOnly &&
getRuleSeverity(ruleConfig, isPrecommit) !== 'error' &&
getSeverity(ruleConfig as any, isPrecommit) !== 'error' &&
ruleName !== 'search-replace'
) {
continue
@@ -575,8 +650,8 @@ function getMarkdownLintConfig(filterErrorsOnly, runRules) {
if (runRules && !shouldIncludeRule(ruleName, runRules)) continue
// There are a subset of rules run on just the frontmatter in files
if (githubDocsFrontmatterConfig[ruleName]) {
config.frontMatter[ruleName] = ruleConfig
if ((githubDocsFrontmatterConfig as any)[ruleName]) {
;(config.frontMatter as any)[ruleName] = ruleConfig
if (customRule) configuredRules.frontMatter.push(customRule)
}
// Handle the special case of the search-replace rule
@@ -589,23 +664,23 @@ function getMarkdownLintConfig(filterErrorsOnly, runRules) {
const frontmatterSearchReplaceRules = []
for (const searchRule of ruleConfig.rules) {
const searchRuleSeverity = getRuleSeverity(searchRule, isPrecommit)
const searchRuleSeverity = getSeverity(searchRule, isPrecommit)
if (filterErrorsOnly && searchRuleSeverity !== 'error') continue
// Add search-replace rules to frontmatter configuration for rules that make sense in frontmatter
// This ensures rules like TODOCS detection work in frontmatter
// Rules with applyToFrontmatter should ONLY run in the frontmatter pass (which lints the entire file)
// to avoid duplicate detections
if (searchRule.applyToFrontmatter) {
frontmatterSearchReplaceRules.push(searchRule)
frontmatterSearchReplaceRules.push(searchRule as any)
} else {
// Only add to content rules if not a frontmatter-specific rule
searchReplaceRules.push(searchRule)
searchReplaceRules.push(searchRule as any)
}
if (searchRule['partial-markdown-files']) {
dataSearchReplaceRules.push(searchRule)
dataSearchReplaceRules.push(searchRule as any)
}
if (searchRule['yml-files']) {
ymlSearchReplaceRules.push(searchRule)
ymlSearchReplaceRules.push(searchRule as any)
}
}
@@ -645,7 +720,7 @@ function getMarkdownLintConfig(filterErrorsOnly, runRules) {
// Return the severity value of a rule but keep in mind it could be
// running as a precommit hook, which means the severity could be
// deliberately different.
function getRuleSeverity(ruleConfig, isInPrecommitMode) {
function getSeverity(ruleConfig: Config, isInPrecommitMode: boolean): string {
return isInPrecommitMode
? ruleConfig.precommitSeverity || ruleConfig.severity
: ruleConfig.severity
@@ -653,7 +728,7 @@ function getRuleSeverity(ruleConfig, isInPrecommitMode) {
// Gets a custom rule function from the name of the rule
// in the configuration file
function getCustomRule(ruleName) {
function getCustomRule(ruleName: string): Rule | MarkdownlintRule {
const rule = customRules.find((r) => r.names.includes(ruleName))
if (!rule)
throw new Error(
@@ -664,7 +739,7 @@ function getCustomRule(ruleName) {
// Check if a rule should be included based on user-specified rules
// Handles both short names (e.g., GHD053, MD001) and long names (e.g., header-content-requirement, heading-increment)
export function shouldIncludeRule(ruleName, runRules) {
export function shouldIncludeRule(ruleName: string, runRules: string[]) {
// First check if the rule name itself is in the list
if (runRules.includes(ruleName)) {
return true
@@ -679,7 +754,7 @@ export function shouldIncludeRule(ruleName, runRules) {
// For built-in markdownlint rules, check if any of the rule's names are in the runRules list
const builtinRule = allRules.find((rule) => rule.names.includes(ruleName))
if (builtinRule) {
return builtinRule.names.some((name) => runRules.includes(name))
return builtinRule.names.some((name: string) => runRules.includes(name))
}
return false
@@ -703,9 +778,15 @@ export function shouldIncludeRule(ruleName, runRules) {
fixInfo: null
}
*/
function getSearchReplaceRuleSeverity(ruleName, object, isInPrecommitMode) {
const pluginRuleName = object.errorDetail.split(':')[0].trim()
const rule = allConfig[ruleName].rules.find((r) => r.name === pluginRuleName)
function getSearchReplaceRuleSeverity(
ruleName: string,
object: LintError,
isInPrecommitMode: boolean,
) {
const pluginRuleName = object.errorDetail?.split(':')[0].trim()
const ruleConfig = allConfig[ruleName] as Config
const rule = ruleConfig.rules?.find((r) => r.name === pluginRuleName)
if (!rule) return 'error' // Default to error if rule not found
return isInPrecommitMode ? rule.precommitSeverity || rule.severity : rule.severity
}
@@ -745,6 +826,6 @@ function isOptionsValid() {
return true
}
function isAFixtureMdFile(filePath) {
function isAFixtureMdFile(filePath: string): boolean {
return filePath.includes('/src') && filePath.includes('/fixtures') && filePath.endsWith('.md')
}

View File

@@ -1,4 +1,3 @@
// @ts-nocheck
import { fileURLToPath } from 'url'
import path from 'path'
import yaml from 'js-yaml'
@@ -188,19 +187,20 @@ const FbvYamlRelPaths = FbvYamlAbsPaths.map((p) => slash(path.relative(rootDir,
const fbvTuples = zip(FbvYamlRelPaths, FbvYamlAbsPaths)
// Put all the yaml files together
ymlToLint = [].concat(
ymlToLint = ([] as Array<[string | undefined, string | undefined]>).concat(
variableYamlTuples, // These "tuples" not tested independently; they are only tested as part of ymlToLint.
glossariesYamlTuples,
fbvTuples,
)
function formatLinkError(message, links) {
function formatLinkError(message: string, links: string[]) {
return `${message}\n - ${links.join('\n - ')}`
}
// Returns `content` if its a string, or `content.description` if it can.
// Used for getting the nested `description` key in glossary files.
function getContent(content) {
// Using any because content can be string | { description: string } | other YAML structures
function getContent(content: any) {
if (typeof content === 'string') return content
if (typeof content.description === 'string') return content.description
return null
@@ -224,9 +224,10 @@ if (diffFiles.length > 0) {
return name
}),
)
const filterFiles = (tuples) =>
const filterFiles = (tuples: Array<[string | undefined, string | undefined]>) =>
tuples.filter(
([relativePath, absolutePath]) => only.has(relativePath) || only.has(absolutePath),
([relativePath, absolutePath]: [string | undefined, string | undefined]) =>
only.has(relativePath!) || only.has(absolutePath!),
)
ymlToLint = filterFiles(ymlToLint)
}
@@ -239,14 +240,15 @@ if (ymlToLint.length === 0) {
} else {
describe('lint yaml content', () => {
if (ymlToLint.length < 1) return
describe.each(ymlToLint)('%s', (yamlRelPath, yamlAbsPath) => {
let dictionary
let isEarlyAccess
let fileContents
describe.each(ymlToLint)('%s', (yamlRelPath: any, yamlAbsPath: any) => {
// Using any because Vitest's describe.each doesn't properly infer tuple types
let dictionary: any // YAML structure varies by file type (variables, glossaries, features)
let isEarlyAccess: boolean
let fileContents: string
// This variable is used to determine if the file was parsed successfully.
// When `yaml.load()` fails to parse the file, it is overwritten with the error message.
// `false` is intentionally chosen since `null` and `undefined` are valid return values.
let dictionaryError = false
let dictionaryError: any = false // Can be false, Error, or other error types
beforeAll(async () => {
fileContents = await fs.readFile(yamlAbsPath, 'utf8')
@@ -279,7 +281,7 @@ if (ymlToLint.length === 0) {
if (!contentStr) continue
const valMatches = contentStr.match(relativeArticleLinkRegex) || []
if (valMatches.length > 0) {
matches.push(...valMatches.map((match) => `Key "${key}": ${match}`))
matches.push(...valMatches.map((match: string) => `Key "${key}": ${match}`))
}
}
@@ -297,7 +299,7 @@ if (ymlToLint.length === 0) {
if (!contentStr) continue
const valMatches = contentStr.match(earlyAccessLinkRegex) || []
if (valMatches.length > 0) {
matches.push(...valMatches.map((match) => `Key "${key}": ${match}`))
matches.push(...valMatches.map((match: string) => `Key "${key}": ${match}`))
}
}
@@ -316,7 +318,7 @@ if (ymlToLint.length === 0) {
if (!contentStr) continue
const valMatches = contentStr.match(earlyAccessImageRegex) || []
if (valMatches.length > 0) {
matches.push(...valMatches.map((match) => `Key "${key}": ${match}`))
matches.push(...valMatches.map((match: string) => `Key "${key}": ${match}`))
}
}
@@ -335,7 +337,7 @@ if (ymlToLint.length === 0) {
if (!contentStr) continue
const valMatches = contentStr.match(badEarlyAccessImageRegex) || []
if (valMatches.length > 0) {
matches.push(...valMatches.map((match) => `Key "${key}": ${match}`))
matches.push(...valMatches.map((match: string) => `Key "${key}": ${match}`))
}
}
@@ -351,7 +353,7 @@ if (ymlToLint.length === 0) {
if (!contentStr) continue
const valMatches = contentStr.match(languageLinkRegex) || []
if (valMatches.length > 0) {
matches.push(...valMatches.map((match) => `Key "${key}": ${match}`))
matches.push(...valMatches.map((match: string) => `Key "${key}": ${match}`))
}
}
@@ -367,7 +369,7 @@ if (ymlToLint.length === 0) {
if (!contentStr) continue
const valMatches = contentStr.match(versionLinkRegEx) || []
if (valMatches.length > 0) {
matches.push(...valMatches.map((match) => `Key "${key}": ${match}`))
matches.push(...valMatches.map((match: string) => `Key "${key}": ${match}`))
}
}
@@ -383,7 +385,7 @@ if (ymlToLint.length === 0) {
if (!contentStr) continue
const valMatches = contentStr.match(domainLinkRegex) || []
if (valMatches.length > 0) {
matches.push(...valMatches.map((match) => `Key "${key}": ${match}`))
matches.push(...valMatches.map((match: string) => `Key "${key}": ${match}`))
}
}
@@ -400,7 +402,7 @@ if (ymlToLint.length === 0) {
const valMatches = contentStr.match(oldVariableRegex) || []
if (valMatches.length > 0) {
matches.push(
...valMatches.map((match) => {
...valMatches.map((match: string) => {
const example = match.replace(
/{{\s*?site\.data\.([a-zA-Z0-9-_]+(?:\.[a-zA-Z0-9-_]+)+)\s*?}}/g,
'{% data $1 %}',
@@ -423,7 +425,7 @@ if (ymlToLint.length === 0) {
if (!contentStr) continue
const valMatches = contentStr.match(oldOcticonRegex) || []
if (valMatches.length > 0) {
matches.push(...valMatches.map((match) => `Key "${key}": ${match}`))
matches.push(...valMatches.map((match: string) => `Key "${key}": ${match}`))
}
}

View File

@@ -1,6 +1,5 @@
import { describe, expect, test } from 'vitest'
import markdownlint from 'markdownlint'
// @ts-ignore - markdownlint-rule-search-replace doesn't provide TypeScript declarations
import searchReplace from 'markdownlint-rule-search-replace'
import { searchReplaceConfig } from '../../style/github-docs'

View File

@@ -58,6 +58,7 @@ export type Config = {
style?: string
rules?: RuleDetail[]
context?: string
precommitSeverity?: string
}
export type RuleConfig = {

View File

@@ -1,5 +1,4 @@
import { TokenizationError } from 'liquidjs'
// @ts-ignore - @primer/octicons doesn't provide TypeScript declarations
import octicons from '@primer/octicons'
// Note: Using 'any' for liquidjs-related types because liquidjs doesn't provide comprehensive TypeScript definitions

View File

@@ -1,7 +1,6 @@
// src/content-render/liquid/prompt.ts
// Defines {% prompt %}…{% endprompt %} to wrap its content in <code> and append the Copilot icon.
// @ts-ignore - @primer/octicons doesn't provide TypeScript declarations
import octicons from '@primer/octicons'
interface LiquidTag {

View File

@@ -2,7 +2,6 @@
* @purpose Writer tool
* @description Move or rename a file or a folder and automatically add redirects
*/
// @ts-nocheck
// [start-readme]
//
// Use this script to help you move or rename a single file or a folder. The script will move or rename the file or folder for you, update relevant `children` in the index.md file(s), and add a `redirect_from` to frontmatter in the renamed file(s). Note: You will still need to manually update the `title` if necessary.
@@ -35,6 +34,20 @@ import escapeStringRegexp from 'escape-string-regexp'
import fm from '@/frame/lib/frontmatter'
import readFrontmatter from '@/frame/lib/read-frontmatter'
// Type definitions
interface MoveOptions {
verbose: boolean
undo: boolean
git: boolean
}
type FileTuple = [string, string, string, string] // [oldPath, newPath, oldHref, newHref]
interface PositionInfo {
childrenPosition: number
childGroupPositions: number[][]
}
// This is so you can optionally run it again the test fixtures root.
const ROOT = process.env.ROOT || '.'
const CONTENT_ROOT = path.resolve(path.join(ROOT, 'content'))
@@ -52,13 +65,13 @@ program
"DON'T use 'git mv' and 'git commit' to move the file. Just regular file moves.",
)
.option('--undo', 'Reverse of moving. I.e. moving it back. Only applies to the last run.')
.arguments('old', 'old file or folder name')
.arguments('new', 'new file or folder name')
.argument('old', 'old file or folder name')
.argument('new', 'new file or folder name')
.parse(process.argv)
main(program.opts(), program.args)
async function main(opts, nameTuple) {
async function main(opts: MoveOptions, nameTuple: string[]) {
const { verbose, undo, git } = opts
if (nameTuple.length !== 2) {
console.error(
@@ -161,7 +174,7 @@ async function main(opts, nameTuple) {
}
} else {
// When it's just an individual file, it's easier.
const files = [[oldPath, newPath, oldHref, newHref]]
const files: FileTuple[] = [[oldPath, newPath, oldHref, newHref]]
// First take care of the `git mv` (or regular rename) part.
moveFiles(files, opts)
@@ -191,7 +204,7 @@ async function main(opts, nameTuple) {
}
}
function validateFileInputs(oldPath, newPath, isFolder) {
function validateFileInputs(oldPath: string, newPath: string, isFolder: boolean) {
if (isFolder) {
// Make sure that only the last portion of the path is different
// and that all preceding are equal.
@@ -241,17 +254,17 @@ function validateFileInputs(oldPath, newPath, isFolder) {
}
}
function existsAndIsDirectory(directory) {
function existsAndIsDirectory(directory: string) {
return fs.existsSync(directory) && fs.lstatSync(directory).isDirectory()
}
function splitDirectory(directory) {
function splitDirectory(directory: string) {
return [path.dirname(directory), path.basename(directory)]
}
function findFilesInFolder(oldPath, newPath, opts) {
function findFilesInFolder(oldPath: string, newPath: string, opts: MoveOptions): FileTuple[] {
const { undo, verbose } = opts
const files = []
const files: FileTuple[] = []
const allFiles = walk(oldPath, { includeBasePath: true, directories: false })
for (const filePath of allFiles) {
const newFilePath = filePath.replace(oldPath, newPath)
@@ -265,17 +278,17 @@ function findFilesInFolder(oldPath, newPath, opts) {
return files
}
function makeHref(root, filePath) {
function makeHref(root: string, filePath: string) {
const nameSplit = path.relative(root, filePath).split(path.sep)
if (nameSplit.slice(-1)[0] === 'index.md') {
nameSplit.pop()
} else {
nameSplit.push(nameSplit.pop().replace(/\.md$/, ''))
nameSplit.push(nameSplit.pop()!.replace(/\.md$/, ''))
}
return `/${nameSplit.join('/')}`
}
function moveFolder(oldPath, newPath, files, opts) {
function moveFolder(oldPath: string, newPath: string, files: FileTuple[], opts: MoveOptions) {
const { verbose, git: useGit } = opts
if (useGit) {
let cmd = ['mv', oldPath, newPath]
@@ -297,7 +310,7 @@ function moveFolder(oldPath, newPath, files, opts) {
}
}
function undoFolder(oldPath, newPath, files, opts) {
function undoFolder(oldPath: string, newPath: string, files: FileTuple[], opts: MoveOptions) {
const { verbose, git: useGit } = opts
if (useGit) {
@@ -320,12 +333,12 @@ function undoFolder(oldPath, newPath, files, opts) {
}
}
function getBasename(fileOrDirectory) {
function getBasename(fileOrDirectory: string) {
// Note, can't use fs.lstatSync().isDirectory() because it's just a string
// at this point. It might not exist.
if (fileOrDirectory.endsWith('index.md')) {
return path.basename(path.directory(fileOrDirectory))
return path.basename(path.dirname(fileOrDirectory))
}
if (fileOrDirectory.endsWith('.md')) {
return path.basename(fileOrDirectory).replace(/\.md$/, '')
@@ -333,7 +346,7 @@ function getBasename(fileOrDirectory) {
return path.basename(fileOrDirectory)
}
function removeFromChildren(oldPath, opts) {
function removeFromChildren(oldPath: string, opts: MoveOptions): PositionInfo {
const { verbose } = opts
const parentFilePath = path.join(path.dirname(oldPath), 'index.md')
@@ -342,8 +355,8 @@ function removeFromChildren(oldPath, opts) {
const oldName = getBasename(oldPath)
let childrenPosition = -1
if (CHILDREN_KEY in data) {
data[CHILDREN_KEY] = data[CHILDREN_KEY].filter((entry, i) => {
if (data && CHILDREN_KEY in data) {
data[CHILDREN_KEY] = data[CHILDREN_KEY].filter((entry: any, i: number) => {
if (entry === oldName || entry === `/${oldName}`) {
childrenPosition = i
return false
@@ -355,11 +368,11 @@ function removeFromChildren(oldPath, opts) {
}
}
const childGroupPositions = []
const childGroupPositions: number[][] = []
;(data[CHILDGROUPS_KEY] || []).forEach((group, i) => {
;((data && data[CHILDGROUPS_KEY]) || []).forEach((group: any, i: number) => {
if (group.children) {
group.children = group.children.filter((entry, j) => {
group.children = group.children.filter((entry: any, j: number) => {
if (entry === oldName || entry === `/${oldName}`) {
childGroupPositions.push([i, j])
return false
@@ -369,11 +382,13 @@ function removeFromChildren(oldPath, opts) {
}
})
fs.writeFileSync(
parentFilePath,
readFrontmatter.stringify(content, data, { lineWidth: 10000 }),
'utf-8',
)
if (data) {
fs.writeFileSync(
parentFilePath,
readFrontmatter.stringify(content, data, { lineWidth: 10000 } as any),
'utf-8',
)
}
if (verbose) {
console.log(`Removed 'children' (${oldName}) key in ${parentFilePath}`)
}
@@ -381,7 +396,7 @@ function removeFromChildren(oldPath, opts) {
return { childrenPosition, childGroupPositions }
}
function addToChildren(newPath, positions, opts) {
function addToChildren(newPath: string, positions: PositionInfo, opts: MoveOptions) {
const { verbose } = opts
const parentFilePath = path.join(path.dirname(newPath), 'index.md')
const fileContent = fs.readFileSync(parentFilePath, 'utf-8')
@@ -389,10 +404,10 @@ function addToChildren(newPath, positions, opts) {
const newName = getBasename(newPath)
const { childrenPosition, childGroupPositions } = positions
if (childrenPosition > -1) {
if (childrenPosition > -1 && data) {
const children = data[CHILDREN_KEY] || []
let prefix = ''
if (children.every((entry) => entry.startsWith('/'))) {
if (children.every((entry: any) => entry.startsWith('/'))) {
prefix += '/'
}
if (childrenPosition > -1 && childrenPosition < children.length) {
@@ -403,7 +418,7 @@ function addToChildren(newPath, positions, opts) {
data[CHILDREN_KEY] = children
}
if (CHILDGROUPS_KEY in data) {
if (data && CHILDGROUPS_KEY in data) {
for (const [groupIndex, groupChildPosition] of childGroupPositions) {
if (groupIndex < data[CHILDGROUPS_KEY].length) {
const group = data[CHILDGROUPS_KEY][groupIndex]
@@ -416,17 +431,19 @@ function addToChildren(newPath, positions, opts) {
}
}
fs.writeFileSync(
parentFilePath,
readFrontmatter.stringify(content, data, { lineWidth: 10000 }),
'utf-8',
)
if (data) {
fs.writeFileSync(
parentFilePath,
readFrontmatter.stringify(content, data, { lineWidth: 10000 } as any),
'utf-8',
)
}
if (verbose) {
console.log(`Added 'children' (${newName}) key in ${parentFilePath}`)
}
}
function moveFiles(files, opts) {
function moveFiles(files: FileTuple[], opts: MoveOptions) {
const { verbose, git: useGit } = opts
// Before we do anything, assert that the files are valid
for (const [oldPath] of files) {
@@ -474,7 +491,7 @@ function moveFiles(files, opts) {
}
}
function editFiles(files, updateParent, opts) {
function editFiles(files: FileTuple[], updateParent: boolean, opts: MoveOptions) {
const { verbose, git: useGit } = opts
// Second loop. This time our only job is to edit the `redirects_from`
@@ -484,13 +501,14 @@ function editFiles(files, updateParent, opts) {
for (const [oldPath, newPath, oldHref, newHref] of files) {
const fileContent = fs.readFileSync(newPath, 'utf-8')
const { content, data } = readFrontmatter(fileContent)
if (!data) continue
if (!(REDIRECT_FROM_KEY in data)) {
data[REDIRECT_FROM_KEY] = []
}
data[REDIRECT_FROM_KEY].push(oldHref)
fs.writeFileSync(
newPath,
readFrontmatter.stringify(content, data, { lineWidth: 10000 }),
readFrontmatter.stringify(content, data, { lineWidth: 10000 } as any),
'utf-8',
)
if (verbose) {
@@ -515,11 +533,11 @@ function editFiles(files, updateParent, opts) {
const filePaths = files.map(([, newPath]) => newPath)
try {
const cmd = ['run', 'add-content-type', '--', '--paths', ...filePaths]
const result = execFileSync('npm', cmd, { cwd: process.cwd(), encoding: 'utf8' })
const result = execFileSync('npm', cmd, { cwd: process.cwd(), encoding: 'utf8' }) as any
if (result.trim()) {
console.log(result.trim())
}
} catch (error) {
} catch (error: any) {
console.warn(`Warning: Failed to add contentType frontmatter: ${error.message}`)
}
}
@@ -538,22 +556,25 @@ function editFiles(files, updateParent, opts) {
}
}
function undoFiles(files, updateParent, opts) {
function undoFiles(files: FileTuple[], updateParent: boolean, opts: MoveOptions) {
const { verbose, git: useGit } = opts
// First undo any edits to the file
for (const [oldPath, newPath, oldHref, newHref] of files) {
const fileContent = fs.readFileSync(newPath, 'utf-8')
const { content, data } = readFrontmatter(fileContent)
if (!data) continue
data[REDIRECT_FROM_KEY] = (data[REDIRECT_FROM_KEY] || []).filter((entry) => entry !== oldHref)
data[REDIRECT_FROM_KEY] = (data[REDIRECT_FROM_KEY] || []).filter(
(entry: any) => entry !== oldHref,
)
if (data[REDIRECT_FROM_KEY].length === 0) {
delete data[REDIRECT_FROM_KEY]
}
fs.writeFileSync(
newPath,
readFrontmatter.stringify(content, data, { lineWidth: 10000 }),
readFrontmatter.stringify(content, data, { lineWidth: 10000 } as any),
'utf-8',
)
if (updateParent) {
@@ -577,15 +598,18 @@ function undoFiles(files, updateParent, opts) {
}
}
function findInLearningTracks(href) {
const allFiles = walk(path.join(DATA_ROOT, 'learning-tracks'), {
function findInLearningTracks(href: string) {
const allFiles: string[] = walk(path.join(DATA_ROOT, 'learning-tracks'), {
globs: ['*.yml'],
includeBasePath: true,
directories: false,
})
const found = []
const found: string[] = []
for (const filePath of allFiles) {
const tracks = yaml.load(fs.readFileSync(filePath, 'utf-8'))
const tracks = yaml.load(fs.readFileSync(filePath, 'utf-8')) as Record<
string,
{ guides?: string[] }
>
if (
Object.values(tracks).find((track) => {
@@ -599,7 +623,7 @@ function findInLearningTracks(href) {
return found
}
function changeLearningTracks(filePath, oldHref, newHref) {
function changeLearningTracks(filePath: string, oldHref: string, newHref: string) {
// Can't deserialize and serialize the Yaml because it would lose
// formatting and comments. So regex replace it.
const regex = new RegExp(`- ${oldHref}$`, 'gm')
@@ -608,7 +632,7 @@ function changeLearningTracks(filePath, oldHref, newHref) {
fs.writeFileSync(filePath, newContent, 'utf-8')
}
function changeHomepageLinks(oldHref, newHref, verbose) {
function changeHomepageLinks(oldHref: string, newHref: string, verbose: boolean) {
// Can't deserialize and serialize the Yaml because it would lose
// formatting and comments. So regex replace it.
// Homepage childGroup links do not have a leading '/', so we need to remove that.
@@ -625,7 +649,7 @@ function changeHomepageLinks(oldHref, newHref, verbose) {
}
}
function changeFeaturedLinks(oldHref, newHref) {
function changeFeaturedLinks(oldHref: string, newHref: string): void {
const allFiles = walk(CONTENT_ROOT, {
globs: ['**/*.md'],
includeBasePath: true,
@@ -638,8 +662,9 @@ function changeFeaturedLinks(oldHref, newHref) {
let changed = false
const fileContent = fs.readFileSync(file, 'utf-8')
const { content, data } = readFrontmatter(fileContent)
if (!data) continue
const featuredLinks = data.featuredLinks || {}
for (const [key, entries] of Object.entries(featuredLinks)) {
for (const [key, entries] of Object.entries(featuredLinks) as [string, string[]][]) {
if (key === 'popularHeading') {
continue
}
@@ -654,7 +679,7 @@ function changeFeaturedLinks(oldHref, newHref) {
if (changed) {
fs.writeFileSync(
file,
readFrontmatter.stringify(content, data, { lineWidth: 10000 }),
readFrontmatter.stringify(content, data, { lineWidth: 10000 } as any),
'utf-8',
)
}

View File

@@ -4,7 +4,6 @@ Custom "Alerts", based on similar filter/styling in the monolith code.
import { visit } from 'unist-util-visit'
import { h } from 'hastscript'
// @ts-ignore - no types available for @primer/octicons
import octicons from '@primer/octicons'
import type { Element } from 'hast'

View File

@@ -7,7 +7,6 @@ import yaml from 'js-yaml'
import fs from 'fs'
import { visit } from 'unist-util-visit'
import { h } from 'hastscript'
// @ts-ignore - no types available for @primer/octicons
import octicons from '@primer/octicons'
import { parse } from 'parse5'
import { fromParse5 } from 'hast-util-from-parse5'
@@ -17,6 +16,7 @@ import type { Element } from 'hast'
interface LanguageConfig {
name: string
// Using any for language properties that can vary (aliases, extensions, etc.)
[key: string]: any
}
@@ -107,12 +107,14 @@ export function header(
function btnIcon(): Element {
const btnIconHtml: string = octicons.copy.toSVG()
const btnIconAst = parse(String(btnIconHtml), { sourceCodeLocationInfo: true })
// @ts-ignore - fromParse5 file option typing issue
const btnIconElement = fromParse5(btnIconAst, { file: btnIconHtml })
// Using any because fromParse5 expects VFile but we only have a string
// This is safe because parse5 only needs the string content
const btnIconElement = fromParse5(btnIconAst, { file: btnIconHtml as any })
return btnIconElement as Element
}
// Using any due to conflicting unist/hast type definitions between dependencies
// node can be various mdast/hast node types, return value contains meta properties from code blocks
export function getPreMeta(node: any): Record<string, any> {
// Here's why this monstrosity works:
// https://github.com/syntax-tree/mdast-util-to-hast/blob/c87cd606731c88a27dbce4bfeaab913a9589bf83/lib/handlers/code.js#L40-L42

View File

@@ -4,14 +4,14 @@
import { find } from 'unist-util-find'
import { h } from 'hastscript'
// @ts-ignore - @primer/octicons doesn't have TypeScript declarations
import octicons from '@primer/octicons'
import { parse } from 'parse5'
import { fromParse5 } from 'hast-util-from-parse5'
import { getPreMeta } from './code-header'
// node and tree are hast/unist AST nodes without proper TypeScript definitions
// Returns a hast element node for the prompt button
// Using any because node and tree are hast/unist AST nodes without proper TypeScript definitions
// node is a pre element from the AST, tree is the full document AST
// Returns a hast element node for the prompt button, or null if no prompt meta exists
export function getPrompt(node: any, tree: any, code: string): any {
const hasPrompt = Boolean(getPreMeta(node).prompt)
if (!hasPrompt) return null
@@ -31,7 +31,8 @@ export function getPrompt(node: any, tree: any, code: string): any {
)
}
// node and tree are hast/unist AST nodes without proper TypeScript definitions
// Using any because node and tree are hast/unist AST nodes without proper TypeScript definitions
// node is the current code block element, tree is used to find referenced code blocks
function buildPromptData(
node: any,
tree: any,
@@ -51,7 +52,8 @@ function buildPromptData(
console.warn(`Can't find referenced code block with id=${ref}`)
return promptOnly(code)
}
// Cast needed to access children property on untyped AST node
// Using any to access children property on untyped hast element node
// AST structure: element -> code -> text node with value property
const matchingCode = (matchingCodeEl as any)?.children[0].children[0].value || null
return promptAndContext(code, matchingCode)
}
@@ -73,18 +75,21 @@ function promptAndContext(
}
}
// tree and node are hast/unist AST nodes without proper TypeScript definitions
// Using any because tree and node are hast/unist AST nodes without proper TypeScript definitions
// Searches the AST tree for a code block with matching id in meta
function findMatchingCode(ref: string, tree: any): any {
return find(tree, (node: any) => {
// Cast needed to access tagName property on untyped element node
// Using any to access tagName property on untyped hast element node
return node.type === 'element' && (node as any).tagName === 'pre' && getPreMeta(node).id === ref
})
}
// Returns a hast element node for the Copilot icon
// Using any return type because fromParse5 returns untyped hast nodes
function copilotIcon(): any {
const copilotIconHtml = octicons.copilot.toSVG()
const copilotIconAst = parse(String(copilotIconHtml), { sourceCodeLocationInfo: true })
const copilotIconElement = fromParse5(copilotIconAst, { file: copilotIconHtml })
// Using any because fromParse5 expects VFile but we only have a string
const copilotIconElement = fromParse5(copilotIconAst, { file: copilotIconHtml as any })
return copilotIconElement
}

View File

@@ -75,7 +75,7 @@ export function shouldShowExperiment(
// Allow developers to override their experiment group for the current session
export const controlGroupOverride = {} as { [key in ExperimentNames]: 'treatment' | 'control' }
if (typeof window !== 'undefined') {
// @ts-expect-error
// @ts-expect-error globally available function
window.overrideControlGroup = (
experimentKey: ExperimentNames,
controlGroup: 'treatment' | 'control',

View File

@@ -1,4 +1,3 @@
// @ts-nocheck
import fs from 'fs/promises'
import { appendFileSync } from 'fs'
import path from 'path'
@@ -16,8 +15,24 @@ import {
getIgnoredChangesSummary,
} from './build-changelog'
// Type definitions
interface GitHubRepoOptions {
owner: string
repo: string
ref?: string
path?: string
}
interface IgnoredChange {
version: string
totalCount: number
types: Array<{ type: string }>
}
const graphqlStaticDir = 'src/graphql/data'
const dataFilenames = JSON.parse(await fs.readFile('src/graphql/scripts/utils/data-filenames.json'))
const dataFilenames = JSON.parse(
await fs.readFile('src/graphql/scripts/utils/data-filenames.json', 'utf8'),
)
// check for required PAT
if (!process.env.GITHUB_TOKEN) {
@@ -28,7 +43,7 @@ const versionsToBuild = Object.keys(allVersions)
main()
const allIgnoredChanges = []
const allIgnoredChanges: IgnoredChange[] = []
async function main() {
for (const version of versionsToBuild) {
@@ -39,7 +54,11 @@ async function main() {
// 1. UPDATE PREVIEWS
const previewsPath = getDataFilepath('previews', graphqlVersion)
const safeForPublicPreviews = yaml.load(await getRemoteRawContent(previewsPath, graphqlVersion))
// GraphQL preview data structure - complex nested object from YAML
// Using any because processPreviews is an external utility without type definitions
const safeForPublicPreviews = yaml.load(
await getRemoteRawContent(previewsPath, graphqlVersion),
) as any
const previewsJson = processPreviews(safeForPublicPreviews)
await updateStaticFile(
previewsJson,
@@ -48,7 +67,10 @@ async function main() {
// 2. UPDATE UPCOMING CHANGES
const upcomingChangesPath = getDataFilepath('upcomingChanges', graphqlVersion)
const previousUpcomingChanges = yaml.load(await fs.readFile(upcomingChangesPath, 'utf8'))
// GraphQL upcoming changes data - contains upcoming_changes array
const previousUpcomingChanges = yaml.load(await fs.readFile(upcomingChangesPath, 'utf8')) as {
upcoming_changes: unknown[]
}
const safeForPublicChanges = await getRemoteRawContent(upcomingChangesPath, graphqlVersion)
await updateFile(upcomingChangesPath, safeForPublicChanges)
const upcomingChangesJson = await processUpcomingChanges(safeForPublicChanges)
@@ -63,9 +85,10 @@ async function main() {
const previousSchemaString = await fs.readFile(previewFilePath, 'utf8')
const latestSchema = await getRemoteRawContent(previewFilePath, graphqlVersion)
await updateFile(previewFilePath, latestSchema)
// Using any because processSchemas returns complex GraphQL schema structures
const schemaJsonPerVersion = await processSchemas(latestSchema, safeForPublicPreviews) // This is slow!
await updateStaticFile(
schemaJsonPerVersion,
schemaJsonPerVersion as any,
path.join(graphqlStaticDir, graphqlVersion, 'schema.json'),
)
@@ -76,8 +99,9 @@ async function main() {
previousSchemaString,
latestSchema,
safeForPublicPreviews,
previousUpcomingChanges.upcoming_changes,
yaml.load(safeForPublicChanges).upcoming_changes,
previousUpcomingChanges.upcoming_changes as any,
(yaml.load(safeForPublicChanges) as { upcoming_changes: unknown[] })
.upcoming_changes as any,
)
if (changelogEntry) {
prependDatedEntry(
@@ -124,31 +148,31 @@ async function main() {
}
// get latest from github/github
async function getRemoteRawContent(filepath, graphqlVersion) {
const options = {
async function getRemoteRawContent(filepath: string, graphqlVersion: string) {
const options: GitHubRepoOptions = {
owner: 'github',
repo: 'github',
}
// find the relevant branch in github/github and set it as options.ref
let t0 = new Date()
let t0 = new Date().getTime()
options.ref = await getBranchAsRef(options, graphqlVersion)
let took = new Date() - t0
let took = new Date().getTime() - t0
console.log(`Got ref (${options.ref}) for '${graphqlVersion}'. Took ${formatTime(took)}`)
// add the filepath to the options so we can get the contents of the file
options.path = `config/${path.basename(filepath)}`
t0 = new Date()
const contents = await getContents(...Object.values(options))
took = new Date() - t0
t0 = new Date().getTime()
const contents = await getContents(options.owner, options.repo, options.ref, options.path)
took = new Date().getTime() - t0
console.log(`Got content for '${options.path}' (in ${options.ref}). Took ${formatTime(took)}`)
return contents
}
// find the relevant filepath in src/graphql/scripts/util/data-filenames.json
function getDataFilepath(id, graphqlVersion) {
function getDataFilepath(id: string, graphqlVersion: string) {
const versionType = getVersionName(graphqlVersion)
// for example, dataFilenames['schema']['ghes'] = schema.docs-enterprise.graphql
@@ -157,11 +181,15 @@ function getDataFilepath(id, graphqlVersion) {
return path.join(graphqlStaticDir, graphqlVersion, filename)
}
async function getBranchAsRef(options, graphqlVersion, branch = false) {
const versionType = getVersionName(graphqlVersion)
async function getBranchAsRef(
options: GitHubRepoOptions,
graphqlVersion: string,
branch: string | boolean = false,
): Promise<string> {
const versionType = getVersionName(graphqlVersion) as 'fpt' | 'ghec' | 'ghes'
const defaultBranch = 'master'
const branches = {
const branches: Record<string, string> = {
fpt: defaultBranch,
ghec: defaultBranch,
ghes: `enterprise-${graphqlVersion.replace('ghes-', '')}-release`,
@@ -174,7 +202,7 @@ async function getBranchAsRef(options, graphqlVersion, branch = false) {
const ref = `heads/${branch}`
// check whether the branch can be found in github/github
const exists = await hasMatchingRef(...Object.values(options), ref)
const exists = await hasMatchingRef(options.owner, options.repo, ref)
// if ref is not found, the branch cannot be found, so try a fallback
if (!exists) {
@@ -186,23 +214,25 @@ async function getBranchAsRef(options, graphqlVersion, branch = false) {
// given a GraphQL version like `ghes-2.22`, return `ghes`;
// given a GraphQL version like `dotcom`, return as is
function getVersionName(graphqlVersion) {
function getVersionName(graphqlVersion: string) {
return graphqlVersion.split('-')[0]
}
async function updateFile(filepath, content) {
async function updateFile(filepath: string, content: string) {
console.log(`Updating file ${filepath}`)
await mkdirp(path.dirname(filepath))
return fs.writeFile(filepath, content, 'utf8')
}
async function updateStaticFile(json, filepath) {
// JSON data from GraphQL schema processing - complex nested structures
// Using any because the structure varies (arrays, objects, nested schemas, etc.)
async function updateStaticFile(json: any, filepath: string) {
console.log(`Updating static file ${filepath}`)
const jsonString = JSON.stringify(json, null, 2)
return updateFile(filepath, jsonString)
}
function formatTime(ms) {
function formatTime(ms: number) {
if (ms < 1000) {
return `${ms.toFixed(0)}ms`
}

View File

@@ -67,7 +67,8 @@ export const CookBookFilter = ({
placeholder={t('search_articles')}
ref={inputRef}
autoComplete="false"
onChange={(e) => {
// Using any because Primer React's TextInput doesn't export proper event types
onChange={(e: any) => {
const query = e.target.value || ''
onSearch(query)
}}

View File

@@ -174,7 +174,8 @@ function RestNavListItem({ category }: { category: ProductTreeNode }) {
<NavList.Item
defaultOpen={routePath.includes(childPage.href)}
key={childPage.href}
onClick={(event) => {
// Using any because Primer React's NavList doesn't export proper event types
onClick={(event: any) => {
event.preventDefault()
push(childPage.href)
}}

View File

@@ -223,7 +223,7 @@ export const ArticleGrid = ({ tocItems, includedCategories, landingType }: Artic
placeholder={t('article_grid.search_articles')}
ref={inputRef}
autoComplete="false"
onChange={(e) => {
onChange={(e: React.ChangeEvent<HTMLInputElement>) => {
const query = e.target.value || ''
handleSearch(query)
}}

View File

@@ -67,7 +67,9 @@ export async function updateInternalLinks(files: string[], options = {}) {
async function updateFile(
file: string,
context: {
// Using any because page data structures vary by page type (articles, guides, etc.)
pages: Record<string, any>
// Using any because redirects can be strings or redirect objects with various properties
redirects: any
currentLanguage: string
userLanguage: string
@@ -92,7 +94,9 @@ async function updateFile(
let newContent = content
const ast = fromMarkdown(newContent)
// Using any[] because replacements can contain various mdast node types with different structures
const replacements: any[] = []
// Using any[] because warnings contain various error information depending on the issue type
const warnings: any[] = []
const newData = structuredClone(data)
@@ -102,6 +106,7 @@ async function updateFile(
// This configuration determines which nested things to bother looking
// into.
// Using any because frontmatter values can be strings, arrays, or nested objects
const HAS_LINKS: Record<string, any> = {
featuredLinks: ['gettingStarted', 'startHere', 'guideCards', 'popular'],
introLinks: ANY,
@@ -215,8 +220,7 @@ async function updateFile(
const hasQuotesAroundLink = content.includes(`"${asMarkdown}`)
// @ts-ignore
const xValue = node?.children?.[0]?.value
const xValue = (node?.children?.[0] as any)?.value
if (opts.setAutotitle) {
if (hasQuotesAroundLink) {
@@ -370,9 +374,12 @@ function linkMatcher(node: Node) {
}
function getNewFrontmatterLinkList(
// Using any[] because frontmatter links can be strings or objects with href/title properties
list: any[],
context: {
// Using any because page data structures vary by page type
pages: Record<string, any>
// Using any because redirects can be strings or redirect objects
redirects: any
currentLanguage: string
userLanguage: string
@@ -447,6 +454,7 @@ function getNewFrontmatterLinkList(
// Try to return the line in the raw content that entry was on.
// It's hard to know exactly because the `entry` is the result of parsing
// the YAML, most likely, from the front
// Using any because entry can be a string or an object with various link properties
function findLineNumber(entry: any, rawContent: string) {
let number = 0
for (const line of rawContent.split(/\n/g)) {
@@ -480,6 +488,7 @@ function stripLiquid(text: string) {
return text
}
// Using any[] for generic array comparison - works with strings, objects, etc.
function equalArray(arr1: any[], arr2: any[]) {
return arr1.length === arr2.length && arr1.every((item, i) => item === arr2[i])
}
@@ -487,7 +496,9 @@ function equalArray(arr1: any[], arr2: any[]) {
function getNewHref(
href: string,
context: {
// Using any because page data structures vary by page type
pages: Record<string, any>
// Using any because redirects can be strings or redirect objects
redirects: any
currentLanguage: string
userLanguage: string

View File

@@ -3,7 +3,6 @@ import { describe, expect, test } from 'vitest'
import { getJsonValidator } from '@/tests/lib/validate-json-schema'
import { productMap } from '@/products/lib/all-products'
import { formatAjvErrors } from '@/tests/helpers/schemas'
// @ts-ignore - Products schema doesn't have TypeScript types yet
import schema from '@/tests/helpers/schemas/products-schema'
const validate = getJsonValidator(schema)

View File

@@ -1,8 +1,6 @@
// @ts-ignore - no types available
import httpStatusCodes from 'http-status-code'
import { get, isPlainObject } from 'lodash-es'
import { parseTemplate } from 'url-template'
// @ts-ignore - no types available
import mergeAllOf from 'json-schema-merge-allof'
import { renderContent } from './render-content'
@@ -178,7 +176,7 @@ export default class Operation {
const mergedAllofSchema = mergeAllOf(schema)
try {
this.bodyParameters = isPlainObject(schema)
? await getBodyParams(mergedAllofSchema, true)
? await getBodyParams(mergedAllofSchema as any, true)
: []
} catch (error) {
console.error(error)

View File

@@ -995,7 +995,7 @@ function renderSearchGroups(
tabIndex={-1}
ref={(element: HTMLLIElement | null) => {
if (listElementsRef.current) {
listElementsRef.current[indexWithOffset] = element
listElementsRef.current[index] = element
}
}}
>

View File

@@ -176,7 +176,7 @@ function ResultsPagination({ page, totalPages }: { page: number; totalPages: num
pageCount={Math.min(totalPages, 10)}
currentPage={page}
hrefBuilder={hrefBuilder}
onPageChange={(event, pageNum) => {
onPageChange={(event: React.MouseEvent, pageNum: number) => {
event.preventDefault()
const [pathRoot, pathQuery = ''] = router.asPath.split('#')[0].split('?')

View File

@@ -1,4 +1,4 @@
import { useEffect, useState } from 'react'
import React, { useEffect, useState } from 'react'
import Cookies from '@/frame/components/lib/cookies'
import { UnderlineNav } from '@primer/react'
import { sendEvent } from '@/events/components/events'
@@ -156,7 +156,7 @@ export const InArticlePicker = ({
href={`?${params}`}
key={option.value}
aria-current={option.value === currentValue ? 'page' : undefined}
onSelect={(event) => {
onSelect={(event: React.MouseEvent | React.KeyboardEvent) => {
event.preventDefault()
onClickChoice(option.value)
}}

View File

@@ -0,0 +1,15 @@
declare module '@github/markdownlint-github' {
  /**
   * Shape of a rule object exported by @github/markdownlint-github.
   * Declared locally because the package ships no TypeScript definitions.
   */
  interface MarkdownlintRule {
    /** Rule identifiers (alias names) for this rule */
    names: string[]
    /** Human-readable summary of what the rule checks */
    description: string
    /** Tags used to group related rules */
    tags: string[]
    // Using any because @github/markdownlint-github doesn't provide TypeScript definitions
    // params contains markdownlint parsing context with varying structures per rule
    // onError is a callback function with dynamic signature
    function: (params: any, onError: any) => void
  }
  /** The package's default export: the full list of its rules */
  const markdownlintGitHub: MarkdownlintRule[]
  export default markdownlintGitHub
}

86
src/types/http-status-code.d.ts vendored Normal file
View File

@@ -0,0 +1,86 @@
declare module 'http-status-code' {
  /**
   * HTTP status code definitions
   *
   * Local ambient typing for the 'http-status-code' package, which ships no
   * TypeScript definitions. Maps CONSTANT_CASE status names to their numeric
   * codes and declares getMessage() for resolving a code to its message string.
   */
  const httpStatusCodes: {
    // 1xx Informational
    CONTINUE: 100
    SWITCHING_PROTOCOLS: 101
    PROCESSING: 102
    EARLY_HINTS: 103
    // 2xx Success
    OK: 200
    CREATED: 201
    ACCEPTED: 202
    NON_AUTHORITATIVE_INFORMATION: 203
    NO_CONTENT: 204
    RESET_CONTENT: 205
    PARTIAL_CONTENT: 206
    MULTI_STATUS: 207
    ALREADY_REPORTED: 208
    IM_USED: 226
    // 3xx Redirection
    MULTIPLE_CHOICES: 300
    MOVED_PERMANENTLY: 301
    FOUND: 302
    SEE_OTHER: 303
    NOT_MODIFIED: 304
    USE_PROXY: 305
    TEMPORARY_REDIRECT: 307
    PERMANENT_REDIRECT: 308
    // 4xx Client Error
    BAD_REQUEST: 400
    UNAUTHORIZED: 401
    PAYMENT_REQUIRED: 402
    FORBIDDEN: 403
    NOT_FOUND: 404
    METHOD_NOT_ALLOWED: 405
    NOT_ACCEPTABLE: 406
    PROXY_AUTHENTICATION_REQUIRED: 407
    REQUEST_TIMEOUT: 408
    CONFLICT: 409
    GONE: 410
    LENGTH_REQUIRED: 411
    PRECONDITION_FAILED: 412
    PAYLOAD_TOO_LARGE: 413
    URI_TOO_LONG: 414
    UNSUPPORTED_MEDIA_TYPE: 415
    RANGE_NOT_SATISFIABLE: 416
    EXPECTATION_FAILED: 417
    IM_A_TEAPOT: 418
    MISDIRECTED_REQUEST: 421
    UNPROCESSABLE_ENTITY: 422
    LOCKED: 423
    FAILED_DEPENDENCY: 424
    TOO_EARLY: 425
    UPGRADE_REQUIRED: 426
    PRECONDITION_REQUIRED: 428
    TOO_MANY_REQUESTS: 429
    REQUEST_HEADER_FIELDS_TOO_LARGE: 431
    UNAVAILABLE_FOR_LEGAL_REASONS: 451
    // 5xx Server Error
    INTERNAL_SERVER_ERROR: 500
    NOT_IMPLEMENTED: 501
    BAD_GATEWAY: 502
    SERVICE_UNAVAILABLE: 503
    GATEWAY_TIMEOUT: 504
    HTTP_VERSION_NOT_SUPPORTED: 505
    VARIANT_ALSO_NEGOTIATES: 506
    INSUFFICIENT_STORAGE: 507
    LOOP_DETECTED: 508
    NOT_EXTENDED: 510
    NETWORK_AUTHENTICATION_REQUIRED: 511
    // Methods
    /**
     * Resolves a numeric status code to its message string
     * @param statusCode - the numeric HTTP status code (e.g. 404)
     * @param protocol - optional protocol selector accepted by the package
     */
    getMessage(statusCode: number, protocol?: string): string
    // Allow numeric access
    // NOTE(review): this index signature types numeric keys as number-or-function;
    // presumably mirrors the package's runtime object shape — confirm against the
    // installed 'http-status-code' module before relying on numeric lookups.
    [statusCode: number]: number | ((statusCode: number, protocol?: string) => string)
  }
  export default httpStatusCodes
}

2
src/types/index.ts Normal file
View File

@@ -0,0 +1,2 @@
// Re-export all types from types.ts for backward compatibility,
// so this index entry point exposes the same symbols as './types'.
export * from './types'

50
src/types/json-schema-merge-allof.d.ts vendored Normal file
View File

@@ -0,0 +1,50 @@
declare module 'json-schema-merge-allof' {
  // Local ambient typing for 'json-schema-merge-allof', which ships no
  // TypeScript definitions.
  /**
   * JSON Schema object that may contain allOf
   */
  interface JSONSchema {
    allOf?: JSONSchema[]
    properties?: Record<string, JSONSchema>
    required?: string[]
    type?: string | string[]
    items?: JSONSchema | JSONSchema[]
    additionalProperties?: boolean | JSONSchema
    [key: string]: any // JSON Schema allows arbitrary additional properties per spec
  }
  /**
   * Options for merging allOf schemas
   */
  interface MergeAllOfOptions {
    /**
     * Resolvers for custom keywords
     * Using any because this third-party library has dynamic schema structures
     * that vary based on the JSON Schema specification
     */
    resolvers?: Record<
      string,
      (values: any[], path: string[], mergeSchemas: any, options: any) => any
    >
    /**
     * Whether to ignore additional properties conflicts
     */
    ignoreAdditionalProperties?: boolean
    /**
     * Deep merge objects instead of replacing
     */
    deep?: boolean
  }
  /**
   * Merges JSON schemas that use allOf into a single schema
   *
   * @param schema - The JSON schema containing allOf
   * @param options - Merge options
   * @returns The merged schema without allOf
   */
  function mergeAllOf(schema: JSONSchema, options?: MergeAllOfOptions): JSONSchema
  export default mergeAllOf
}

14
src/types/markdownlint-lib-rules.d.ts vendored Normal file
View File

@@ -0,0 +1,14 @@
declare module '*/markdownlint/lib/rules' {
  /**
   * Shape of a built-in markdownlint rule.
   * The wildcard module pattern matches the deep import of markdownlint's
   * internal rules list (e.g. node_modules/markdownlint/lib/rules), which
   * has no published TypeScript definitions.
   */
  interface MarkdownlintRule {
    /** Rule identifiers (alias names) for this rule */
    names: string[]
    /** Human-readable summary of what the rule checks */
    description: string
    /** Tags used to group related rules */
    tags: string[]
    // Using any because markdownlint doesn't provide TypeScript definitions
    // params contains parsing context with varying structures per rule
    // onError is a callback function with dynamic signature
    function: (params: any, onError: any) => void
  }
  /** The module's default export: the list of built-in rules */
  const rules: MarkdownlintRule[]
  export default rules
}

View File

@@ -0,0 +1,40 @@
declare module 'markdownlint-rule-helpers' {
  // Local ambient typing for 'markdownlint-rule-helpers', which ships no
  // TypeScript definitions.
  /**
   * Adds an error to the linting results
   * Using any because this third-party library doesn't provide TypeScript definitions
   * onError is a callback function with dynamic signature from markdownlint
   * fixInfo contains various fix information structures depending on the error type
   * @param lineNumber - 1-based line the error is reported on
   * @param detail - extra detail appended to the rule's description
   * @param context - snippet of the offending content, if available
   * @param range - column range of the error within the line, if available
   */
  export function addError(
    onError: any,
    lineNumber: number,
    detail?: string,
    context?: string | null,
    range?: [number, number] | number[] | string | null,
    fixInfo?: any,
  ): void
  /**
   * Filters tokens by type and calls a handler for each matching token
   * Using any because markdownlint-rule-helpers has no TypeScript definitions
   * params contains markdownlint parsing parameters with varying structures
   * token represents markdown tokens with different properties per token type
   */
  export function filterTokens(params: any, type: string, handler: (token: any) => void): void
  /**
   * Truncates long strings with ellipsis for display
   */
  export function ellipsify(text: string, length?: number, preferEnd?: boolean): string
  /**
   * Regular expression for newline characters
   */
  export const newLineRe: RegExp
  /**
   * Applies fixes to markdown content
   * Using any[] because error objects from markdownlint have dynamic structures
   * @returns the content with the given fixes applied
   */
  export function applyFixes(content: string, errors: any[]): string
}

View File

@@ -0,0 +1,12 @@
declare module 'markdownlint-rule-search-replace' {
  /**
   * The search-replace markdownlint plugin rule (single-rule package).
   * Declared locally because the package ships no TypeScript definitions.
   */
  const searchReplace: {
    /** Rule identifiers (alias names) for this rule */
    names: string[]
    /** Human-readable summary of what the rule checks */
    description: string
    /** Tags used to group related rules */
    tags: string[]
    // Using any because this is a third-party library without proper TypeScript definitions
    // params contains markdownlint-specific data structures, onError is a callback function
    function: (params: any, onError: any) => void
  }
  export default searchReplace
}

72
src/types/primer__octicons.d.ts vendored Normal file
View File

@@ -0,0 +1,72 @@
declare module '@primer/octicons' {
  // Local ambient typing for the '@primer/octicons' node package, which
  // ships no TypeScript definitions.
  /**
   * Options accepted by Octicon.toSVG().
   * The open index signature keeps the options bag extensible; extra keys
   * are presumably forwarded as attributes on the generated SVG — confirm
   * against the @primer/octicons node API docs.
   */
  interface OcticonOptions {
    width?: number | string
    height?: number | string
    'aria-label'?: string
    'aria-hidden'?: string | boolean
    class?: string
    fill?: string
    [key: string]: any
  }
  /** A single icon entry as exposed by the package */
  interface Octicon {
    /**
     * The SVG path data for the icon
     */
    path: string
    /**
     * The default width of the icon
     */
    width: number
    /**
     * The default height of the icon
     */
    height: number
    /**
     * Heights-based icon data
     */
    heights: {
      [size: number]: {
        path: string
        width: number
        height: number
      }
    }
    /**
     * Convert the octicon to an SVG string
     */
    toSVG(options?: OcticonOptions): string
  }
  /**
   * All icons keyed by name. The explicit members below duplicate the index
   * signature on purpose: they give editor autocomplete for commonly used
   * icons without enumerating the full set.
   */
  const octicons: {
    [iconName: string]: Octicon
    // Common icons (non-exhaustive list for better autocomplete)
    alert: Octicon
    check: Octicon
    'check-circle': Octicon
    chevron: Octicon
    'chevron-down': Octicon
    'chevron-left': Octicon
    'chevron-right': Octicon
    'chevron-up': Octicon
    code: Octicon
    copy: Octicon
    copilot: Octicon
    download: Octicon
    gear: Octicon
    info: Octicon
    link: Octicon
    'link-external': Octicon
    'mark-github': Octicon
    search: Octicon
    'triangle-down': Octicon
    x: Octicon
  }
  export default octicons
}