diff --git a/eslint.config.ts b/eslint.config.ts index 2d4fdfca64..ebbdbc052a 100644 --- a/eslint.config.ts +++ b/eslint.config.ts @@ -97,7 +97,6 @@ export default [ // Disabled rules to review '@typescript-eslint/ban-ts-comment': 'off', // 50+ - 'no-shadow': 'off', // 150+ 'github/array-foreach': 'off', // 250+ 'no-console': 'off', // 800+ '@typescript-eslint/no-explicit-any': 'off', // 1000+ diff --git a/next.config.ts b/next.config.ts index ce4e8d3a92..677f17c788 100644 --- a/next.config.ts +++ b/next.config.ts @@ -49,11 +49,11 @@ const config: NextConfig = { } }) }, - webpack: (config) => { - config.experiments = config.experiments || {} - config.experiments.topLevelAwait = true - config.resolve.fallback = { fs: false, async_hooks: false } - return config + webpack: (webpackConfig) => { + webpackConfig.experiments = webpackConfig.experiments || {} + webpackConfig.experiments.topLevelAwait = true + webpackConfig.resolve.fallback = { fs: false, async_hooks: false } + return webpackConfig }, // https://nextjs.org/docs/api-reference/next.config.js/compression diff --git a/src/assets/middleware/dynamic-assets.ts b/src/assets/middleware/dynamic-assets.ts index 6629891103..25ed3fb67c 100644 --- a/src/assets/middleware/dynamic-assets.ts +++ b/src/assets/middleware/dynamic-assets.ts @@ -143,9 +143,9 @@ export default async function dynamicAssets( const buffer = await image.webp({ effort }).toBuffer() assetCacheControl(res) return res.type('image/webp').send(buffer) - } catch (error) { - if (error instanceof Error && (error as any).code !== 'ENOENT') { - throw error + } catch (catchError) { + if (catchError instanceof Error && (catchError as any).code !== 'ENOENT') { + throw catchError } } } diff --git a/src/assets/tests/static-assets.ts b/src/assets/tests/static-assets.ts index 272aa86201..4a1c9ed217 100644 --- a/src/assets/tests/static-assets.ts +++ b/src/assets/tests/static-assets.ts @@ -16,13 +16,13 @@ function getNextStaticAsset(directory: string) { return path.join(root, files[0]) } -function mockRequest(path: string, { headers }: { headers?: Record } = {}) { +function mockRequest(requestPath: string, { headers }: { headers?: Record } = {}) { const _headers = Object.fromEntries( Object.entries(headers || {}).map(([key, value]) => [key.toLowerCase(), value]), ) return { - path, - url: path, + path: requestPath, + url: requestPath, get: (header: string) => { return _headers[header.toLowerCase()] }, @@ -74,8 +74,8 @@ const mockResponse = () => { if (typeof key === 'string') { res.headers[key.toLowerCase()] = value } else { - for (const [k, value] of Object.entries(key)) { - res.headers[k.toLowerCase()] = value + for (const [k, v] of Object.entries(key)) { + res.headers[k.toLowerCase()] = v } } } @@ -319,9 +319,9 @@ describe('archived enterprise static assets', () => { }, ])( 'should return $expectStatus for $name', - ({ name, path, referrer, expectStatus, shouldCallNext }) => { + ({ name, path: testPath, referrer, expectStatus, shouldCallNext }) => { test(name, async () => { - const req = mockRequest(path, { + const req = mockRequest(testPath, { headers: { Referrer: referrer, }, @@ -359,22 +359,25 @@ describe('archived enterprise static assets', () => { expectStatus: undefined, shouldCallNext: true, }, - ])('should not suppress $name', ({ name, path, referrer, expectStatus, shouldCallNext }) => { - test(name, async () => { - const req = mockRequest(path, { - headers: { - Referrer: referrer, - }, + ])( + 'should not suppress $name', + ({ name, path: testPath, referrer, expectStatus, 
shouldCallNext }) => { + test(name, async () => { + const req = mockRequest(testPath, { + headers: { + Referrer: referrer, + }, + }) + const res = mockResponse() + let nexted = false + const next = () => { + nexted = true + } + setDefaultFastlySurrogateKey(req, res, () => {}) + await archivedEnterpriseVersionsAssets(req as any, res as any, next) + expect(nexted).toBe(shouldCallNext) + expect(res.statusCode).toBe(expectStatus) }) - const res = mockResponse() - let nexted = false - const next = () => { - nexted = true - } - setDefaultFastlySurrogateKey(req, res, () => {}) - await archivedEnterpriseVersionsAssets(req as any, res as any, next) - expect(nexted).toBe(shouldCallNext) - expect(res.statusCode).toBe(expectStatus) - }) - }) + }, + ) }) diff --git a/src/codeql-cli/scripts/sync.ts b/src/codeql-cli/scripts/sync.ts index 5f360da6bf..7ca45f732e 100755 --- a/src/codeql-cli/scripts/sync.ts +++ b/src/codeql-cli/scripts/sync.ts @@ -83,8 +83,8 @@ async function setupEnvironment() { // copy the raw rst files to the temp directory and convert them // to Markdownusing pandoc -async function rstToMarkdown(sourceDirectory: string) { - const sourceFiles = walk(sourceDirectory, { +async function rstToMarkdown(rstSourceDirectory: string) { + const sourceFiles = walk(rstSourceDirectory, { includeBasePath: true, globs: ['**/*.rst'], }) diff --git a/src/content-linter/lib/helpers/get-lintable-yml.ts b/src/content-linter/lib/helpers/get-lintable-yml.ts index f697943349..087bf4b5ed 100755 --- a/src/content-linter/lib/helpers/get-lintable-yml.ts +++ b/src/content-linter/lib/helpers/get-lintable-yml.ts @@ -80,15 +80,15 @@ export async function getLintableYml(dataFilePath: string): Promise, dataFilePath: string): Map { - const keys = Array.from(mdDict.keys()) +function addPathToKey(mdDictMap: Map, dataFilePath: string): Map { + const keys = Array.from(mdDictMap.keys()) keys.forEach((key) => { const newKey = `${dataFilePath} ${key}` - const value = mdDict.get(key) + const value = mdDictMap.get(key) if (value !== undefined) { - mdDict.delete(key) - mdDict.set(newKey, value) + mdDictMap.delete(key) + mdDictMap.set(newKey, value) } }) - return mdDict + return mdDictMap } diff --git a/src/content-linter/lib/linting-rules/frontmatter-hero-image.ts b/src/content-linter/lib/linting-rules/frontmatter-hero-image.ts index e7cd45bd8b..e163cb641c 100644 --- a/src/content-linter/lib/linting-rules/frontmatter-hero-image.ts +++ b/src/content-linter/lib/linting-rules/frontmatter-hero-image.ts @@ -45,7 +45,7 @@ export const frontmatterHeroImage: Rule = { // Check if heroImage is an absolute path if (!heroImage.startsWith('/')) { - const line = params.lines.find((line: string) => line.trim().startsWith('heroImage:')) + const line = params.lines.find((ln: string) => ln.trim().startsWith('heroImage:')) const lineNumber = line ? params.lines.indexOf(line) + 1 : 1 addError( onError, @@ -59,7 +59,7 @@ export const frontmatterHeroImage: Rule = { // Check if heroImage points to banner-images directory if (!heroImage.startsWith('/assets/images/banner-images/')) { - const line = params.lines.find((line: string) => line.trim().startsWith('heroImage:')) + const line = params.lines.find((ln: string) => ln.trim().startsWith('heroImage:')) const lineNumber = line ? 
params.lines.indexOf(line) + 1 : 1 addError( onError, @@ -74,7 +74,7 @@ export const frontmatterHeroImage: Rule = { // Check if the file actually exists const validHeroImages = getValidHeroImages() if (validHeroImages.length > 0 && !validHeroImages.includes(heroImage)) { - const line = params.lines.find((line: string) => line.trim().startsWith('heroImage:')) + const line = params.lines.find((ln: string) => ln.trim().startsWith('heroImage:')) const lineNumber = line ? params.lines.indexOf(line) + 1 : 1 const availableImages = validHeroImages.join(', ') addError( diff --git a/src/content-linter/lib/linting-rules/frontmatter-intro-links.ts b/src/content-linter/lib/linting-rules/frontmatter-intro-links.ts index 2db601eb6f..f07a0394c8 100644 --- a/src/content-linter/lib/linting-rules/frontmatter-intro-links.ts +++ b/src/content-linter/lib/linting-rules/frontmatter-intro-links.ts @@ -48,8 +48,8 @@ export const frontmatterIntroLinks: Rule = { for (const key of Object.keys(introLinks)) { if (!validKeys.includes(key)) { // Find the line with this key - const line = params.lines.find((line: string) => { - const trimmed = line.trim() + const line = params.lines.find((ln: string) => { + const trimmed = ln.trim() return trimmed.startsWith(`${key}:`) && !trimmed.startsWith('introLinks:') }) const lineNumber = line ? params.lines.indexOf(line) + 1 : 1 diff --git a/src/content-linter/lib/linting-rules/frontmatter-schema.ts b/src/content-linter/lib/linting-rules/frontmatter-schema.ts index dc67da8449..d616a2021d 100644 --- a/src/content-linter/lib/linting-rules/frontmatter-schema.ts +++ b/src/content-linter/lib/linting-rules/frontmatter-schema.ts @@ -24,7 +24,7 @@ export const frontmatterSchema: Rule = { for (const key of deprecatedKeys) { // Early access articles are allowed to have deprecated properties if (params.name.includes('early-access')) continue - const line = params.lines.find((line: string) => line.trim().startsWith(key)) + const line = params.lines.find((ln: string) => ln.trim().startsWith(key)) const lineNumber = params.lines.indexOf(line!) + 1 addError( onError, diff --git a/src/content-linter/scripts/lint-content.ts b/src/content-linter/scripts/lint-content.ts index b516d5f591..60b2264e0b 100755 --- a/src/content-linter/scripts/lint-content.ts +++ b/src/content-linter/scripts/lint-content.ts @@ -262,7 +262,7 @@ async function main() { } const fixableFiles = Object.entries(formattedResults) - .filter(([, results]) => results.some((result) => result.fixable)) + .filter(([, fileResults]) => fileResults.some((flaw) => flaw.fixable)) .map(([file]) => file) if (fixableFiles.length) { console.log('') // Just for some whitespace before the next message @@ -302,7 +302,7 @@ function pluralize(things, word, pluralForm = null) { // (e.g., heading linters) so we need to separate the // list of data files from all other files to run // through markdownlint individually -function getFilesToLint(paths) { +function getFilesToLint(inputPaths) { const fileList = { length: 0, content: [], @@ -316,7 +316,7 @@ function getFilesToLint(paths) { // The path passed to Markdownlint is what is displayed // in the error report, so we want to normalize it and // and make it relative if it's absolute. - for (const rawPath of paths) { + for (const rawPath of inputPaths) { const absPath = path.resolve(rawPath) if (fs.statSync(rawPath).isDirectory()) { if (isInDir(absPath, contentDir)) { @@ -427,16 +427,16 @@ function reportSummaryByRule(results, config) { result. 
Results are sorted by severity per file, with errors listed first then warnings. */ -function getFormattedResults(allResults, isPrecommit) { +function getFormattedResults(allResults, isInPrecommitMode) { const output = {} Object.entries(allResults) // Each result key always has an array value, but it may be empty .filter(([, results]) => results.length) - .forEach(([key, results]) => { + .forEach(([key, fileResults]) => { if (verbose) { - output[key] = [...results] + output[key] = [...fileResults] } else { - const formattedResults = results.map((flaw) => formatResult(flaw, isPrecommit)) + const formattedResults = fileResults.map((flaw) => formatResult(flaw, isInPrecommitMode)) // Only add the file to output if there are results after filtering if (formattedResults.length > 0) { @@ -465,8 +465,8 @@ function getErrorCountByFile(results, fixed = false) { return getCountBySeverity(results, 'error', fixed) } function getCountBySeverity(results, severityLookup, fixed) { - return Object.values(results).filter((results) => - results.some((result) => { + return Object.values(results).filter((fileResults) => + fileResults.some((result) => { // If --fix was applied, we don't want to know about files that // no longer have errors or warnings. return result.severity === severityLookup && (!fixed || !result.fixable) @@ -477,7 +477,7 @@ function getCountBySeverity(results, severityLookup, fixed) { // Removes null values and properties that are not relevant to content // writers, adds the severity to each result object, and transforms // some error and fix data into a more readable format. -function formatResult(object, isPrecommit) { +function formatResult(object, isInPrecommitMode) { const formattedResult = {} // Add severity to each result object @@ -486,7 +486,8 @@ function formatResult(object, isPrecommit) { throw new Error(`Rule not found in allConfig: '${ruleName}'`) } formattedResult.severity = - allConfig[ruleName].severity || getSearchReplaceRuleSeverity(ruleName, object, isPrecommit) + allConfig[ruleName].severity || + getSearchReplaceRuleSeverity(ruleName, object, isInPrecommitMode) formattedResult.context = allConfig[ruleName].context || '' @@ -540,7 +541,7 @@ function listRules() { Rules that can't be run on partials have the property `partial-markdown-files` set to false. */ -function getMarkdownLintConfig(errorsOnly, runRules) { +function getMarkdownLintConfig(filterErrorsOnly, runRules) { const config = { content: structuredClone(defaultConfig), data: structuredClone(defaultConfig), @@ -559,7 +560,7 @@ function getMarkdownLintConfig(errorsOnly, runRules) { // search-replace is handled differently than other rules because // it has nested metadata and rules. 
if ( - errorsOnly && + filterErrorsOnly && getRuleSeverity(ruleConfig, isPrecommit) !== 'error' && ruleName !== 'search-replace' ) { @@ -585,7 +586,7 @@ function getMarkdownLintConfig(errorsOnly, runRules) { for (const searchRule of ruleConfig.rules) { const searchRuleSeverity = getRuleSeverity(searchRule, isPrecommit) - if (errorsOnly && searchRuleSeverity !== 'error') continue + if (filterErrorsOnly && searchRuleSeverity !== 'error') continue // Add search-replace rules to frontmatter configuration for rules that make sense in frontmatter // This ensures rules like TODOCS detection work in frontmatter // Rules with applyToFrontmatter should ONLY run in the frontmatter pass (which lints the entire file) @@ -640,14 +641,16 @@ function getMarkdownLintConfig(errorsOnly, runRules) { // Return the severity value of a rule but keep in mind it could be // running as a precommit hook, which means the severity could be // deliberately different. -function getRuleSeverity(rule, isPrecommit) { - return isPrecommit ? rule.precommitSeverity || rule.severity : rule.severity +function getRuleSeverity(ruleConfig, isInPrecommitMode) { + return isInPrecommitMode + ? ruleConfig.precommitSeverity || ruleConfig.severity + : ruleConfig.severity } // Gets a custom rule function from the name of the rule // in the configuration file function getCustomRule(ruleName) { - const rule = customRules.find((rule) => rule.names.includes(ruleName)) + const rule = customRules.find((r) => r.names.includes(ruleName)) if (!rule) throw new Error( `A content-lint rule ('${ruleName}') is configured in the markdownlint config file but does not have a corresponding rule function.`, @@ -696,24 +699,24 @@ export function shouldIncludeRule(ruleName, runRules) { fixInfo: null } */ -function getSearchReplaceRuleSeverity(ruleName, object, isPrecommit) { +function getSearchReplaceRuleSeverity(ruleName, object, isInPrecommitMode) { const pluginRuleName = object.errorDetail.split(':')[0].trim() - const rule = allConfig[ruleName].rules.find((rule) => rule.name === pluginRuleName) - return isPrecommit ? rule.precommitSeverity || rule.severity : rule.severity + const rule = allConfig[ruleName].rules.find((r) => r.name === pluginRuleName) + return isInPrecommitMode ? rule.precommitSeverity || rule.severity : rule.severity } function isOptionsValid() { // paths should only contain existing files and directories - const paths = program.opts().paths || [] - for (const path of paths) { + const optionPaths = program.opts().paths || [] + for (const filePath of optionPaths) { try { - fs.statSync(path) + fs.statSync(filePath) } catch { - if ('paths'.includes(path)) { + if ('paths'.includes(filePath)) { console.log('error: did you mean --paths') } else { console.log( - `error: invalid --paths (-p) option. The value '${path}' is not a valid file or directory`, + `error: invalid --paths (-p) option. 
The value '${filePath}' is not a valid file or directory`, ) } return false @@ -722,14 +725,14 @@ function isOptionsValid() { // rules should only contain existing, correctly spelled rules const allRulesList = [...allRules.map((rule) => rule.names).flat(), ...Object.keys(allConfig)] - const rules = program.opts().rules || [] - for (const rule of rules) { - if (!allRulesList.includes(rule)) { - if ('rules'.includes(rule)) { + const optionRules = program.opts().rules || [] + for (const ruleName of optionRules) { + if (!allRulesList.includes(ruleName)) { + if ('rules'.includes(ruleName)) { console.log('error: did you mean --rules') } else { console.log( - `error: invalid --rules (-r) option. The value '${rule}' is not a valid rule name.`, + `error: invalid --rules (-r) option. The value '${ruleName}' is not a valid rule name.`, ) } return false diff --git a/src/content-linter/scripts/pretty-print-results.ts b/src/content-linter/scripts/pretty-print-results.ts index d447b1170c..3c0a8124a0 100644 --- a/src/content-linter/scripts/pretty-print-results.ts +++ b/src/content-linter/scripts/pretty-print-results.ts @@ -53,10 +53,10 @@ export function prettyPrintResults( let ruleDescription = '' const errorDetailsByDescription = new Map() - for (const { errorDetail, ruleDescription } of sorted) { - const details = errorDetailsByDescription.get(ruleDescription) || new Set() + for (const { errorDetail, ruleDescription: ruleDesc } of sorted) { + const details = errorDetailsByDescription.get(ruleDesc) || new Set() details.add(errorDetail) - errorDetailsByDescription.set(ruleDescription, details) + errorDetailsByDescription.set(ruleDesc, details) } for (const result of sorted) { diff --git a/src/content-linter/tests/category-pages.ts b/src/content-linter/tests/category-pages.ts index 6366e3d66f..6ea2edb1a7 100644 --- a/src/content-linter/tests/category-pages.ts +++ b/src/content-linter/tests/category-pages.ts @@ -98,10 +98,10 @@ describe.skip('category pages', () => { const indexContents = await fs.promises.readFile(indexAbsPath, 'utf8') const parsed = matter(indexContents) if (!parsed.data) throw new Error('No frontmatter') - const data = parsed.data as MarkdownFrontmatter - categoryVersions = getApplicableVersions(data.versions, indexAbsPath) - allowTitleToDifferFromFilename = data.allowTitleToDifferFromFilename - const articleLinks = data.children.filter((child) => { + const categoryData = parsed.data as MarkdownFrontmatter + categoryVersions = getApplicableVersions(categoryData.versions, indexAbsPath) + allowTitleToDifferFromFilename = categoryData.allowTitleToDifferFromFilename + const articleLinks = categoryData.children.filter((child) => { const mdPath = getPath(productDir, indexLink, child) const fileExists = fs.existsSync(mdPath) return fileExists && fs.statSync(mdPath).isFile() @@ -137,10 +137,10 @@ describe.skip('category pages', () => { articleLinks.map(async (articleLink) => { const articlePath = getPath(productDir, indexLink, articleLink) const articleContents = await fs.promises.readFile(articlePath, 'utf8') - const data = getFrontmatterData(articleContents) + const articleData = getFrontmatterData(articleContents) // Do not include subcategories in list of published articles - if (data.subcategory || data.hidden) return null + if (articleData.subcategory || articleData.hidden) return null // ".../content/github/{category}/{article}.md" => "/{article}" return `/${path.relative(categoryDir, articlePath).replace(/\.md$/, '')}` @@ -159,10 +159,10 @@ describe.skip('category pages', () => { await 
Promise.all( childFilePaths.map(async (articlePath) => { const articleContents = await fs.promises.readFile(articlePath, 'utf8') - const data = getFrontmatterData(articleContents) + const availableArticleData = getFrontmatterData(articleContents) // Do not include subcategories nor hidden pages in list of available articles - if (data.subcategory || data.hidden) return null + if (availableArticleData.subcategory || availableArticleData.hidden) return null // ".../content/github/{category}/{article}.md" => "/{article}" return `/${path.relative(categoryDir, articlePath).replace(/\.md$/, '')}` @@ -173,10 +173,10 @@ describe.skip('category pages', () => { await Promise.all( childFilePaths.map(async (articlePath) => { const articleContents = await fs.promises.readFile(articlePath, 'utf8') - const data = getFrontmatterData(articleContents) + const versionData = getFrontmatterData(articleContents) articleVersions[articlePath] = getApplicableVersions( - data.versions, + versionData.versions, articlePath, ) as string[] }), @@ -196,8 +196,8 @@ describe.skip('category pages', () => { }) test('contains only articles and subcategories with versions that are also available in the parent category', () => { - Object.entries(articleVersions).forEach(([articleName, articleVersions]) => { - const unexpectedVersions = difference(articleVersions, categoryVersions) + Object.entries(articleVersions).forEach(([articleName, versions]) => { + const unexpectedVersions = difference(versions, categoryVersions) const errorMessage = `${articleName} has versions that are not available in parent category` expect(unexpectedVersions.length, errorMessage).toBe(0) }) diff --git a/src/content-linter/tests/lint-frontmatter-links.ts b/src/content-linter/tests/lint-frontmatter-links.ts index 5e5985dc9f..1b5fa16c7a 100644 --- a/src/content-linter/tests/lint-frontmatter-links.ts +++ b/src/content-linter/tests/lint-frontmatter-links.ts @@ -24,10 +24,10 @@ describe('front matter', () => { } // Using any type because trouble array contains objects with varying error properties const nonWarnings = trouble.filter((t: any) => !t.warning) - for (const { uri, index, redirects } of nonWarnings) { + for (const { uri, index, redirects: redirectTo } of nonWarnings) { customErrorMessage += `\nindex: ${index} URI: ${uri}` - if (redirects) { - customErrorMessage += `\n\tredirects to ${redirects}` + if (redirectTo) { + customErrorMessage += `\n\tredirects to ${redirectTo}` } else { customErrorMessage += '\tPage not found' } diff --git a/src/content-render/scripts/add-content-type.ts b/src/content-render/scripts/add-content-type.ts index a73565a441..15c642f697 100644 --- a/src/content-render/scripts/add-content-type.ts +++ b/src/content-render/scripts/add-content-type.ts @@ -88,7 +88,7 @@ async function main() { console.log(`\nUpdated ${updatedCount} files out of ${processedCount}`) } -function processFile(filePath: string, options: ScriptOptions) { +function processFile(filePath: string, scriptOptions: ScriptOptions) { const fileContent = fs.readFileSync(filePath, 'utf8') const relativePath = path.relative(contentDir, filePath) @@ -100,11 +100,11 @@ function processFile(filePath: string, options: ScriptOptions) { if (!data) return { processed: false, updated: false } // Remove the legacy type property if option is passed - const removeLegacyType = Boolean(options.removeType && data.type) + const removeLegacyType = Boolean(scriptOptions.removeType && data.type) const newContentType = determineContentType(relativePath, data.type || '') - if 
(options.dryRun) { + if (scriptOptions.dryRun) { console.log(`\n${relativePath}`) if (!data.contentType) { console.log(` ✅ Would set contentType: "${newContentType}"`) @@ -144,7 +144,7 @@ function processFile(filePath: string, options: ScriptOptions) { // Write the file back fs.writeFileSync(filePath, frontmatter.stringify(content, data, { lineWidth: -1 } as any)) - if (options.verbose) { + if (scriptOptions.verbose) { console.log(`\n${relativePath}`) console.log(` ✅ Set contentType: "${newContentType}"`) if (removeLegacyType) { diff --git a/src/content-render/scripts/all-documents/cli.ts b/src/content-render/scripts/all-documents/cli.ts index 45c7701cb3..3e4893ef97 100644 --- a/src/content-render/scripts/all-documents/cli.ts +++ b/src/content-render/scripts/all-documents/cli.ts @@ -115,10 +115,10 @@ async function main(options: Options) { const toJson: AllDocument[] = [] for (const doc of documents) { - const { documents, ...rest } = doc + const { documents: docDocuments, ...rest } = doc toJson.push({ ...rest, - documents, + documents: docDocuments, }) } diff --git a/src/content-render/scripts/all-documents/lib.ts b/src/content-render/scripts/all-documents/lib.ts index 4f2e393180..fc7f304276 100644 --- a/src/content-render/scripts/all-documents/lib.ts +++ b/src/content-render/scripts/all-documents/lib.ts @@ -34,7 +34,7 @@ export async function allDocuments(options: Options): Promise { const site = await warmServer(options.languages) const pages: Page[] = site.pageList - const allDocuments: AllDocument[] = [] + const allDocumentsResult: AllDocument[] = [] type ByVersion = Map const byLanguage = new Map() @@ -96,8 +96,8 @@ export async function allDocuments(options: Options): Promise { } for (const [language, byVersion] of byLanguage) { for (const [version, documents] of byVersion) { - allDocuments.push({ version, language, documents }) + allDocumentsResult.push({ version, language, documents }) } } - return allDocuments + return allDocumentsResult } diff --git a/src/content-render/scripts/move-content.ts b/src/content-render/scripts/move-content.ts index 2a699d7c00..f4b7c86eb6 100755 --- a/src/content-render/scripts/move-content.ts +++ b/src/content-render/scripts/move-content.ts @@ -400,11 +400,11 @@ function addToChildren(newPath, positions, opts) { } if (CHILDGROUPS_KEY in data) { - for (const [groupIndex, childrenPosition] of childGroupPositions) { + for (const [groupIndex, groupChildPosition] of childGroupPositions) { if (groupIndex < data[CHILDGROUPS_KEY].length) { const group = data[CHILDGROUPS_KEY][groupIndex] - if (childrenPosition < group.children.length) { - group.children.splice(childrenPosition, 0, newName) + if (groupChildPosition < group.children.length) { + group.children.splice(groupChildPosition, 0, newName) } else { group.children.push(newName) } diff --git a/src/content-render/scripts/render-content-markdown.ts b/src/content-render/scripts/render-content-markdown.ts index 8e976014d3..76e0f7e1f5 100755 --- a/src/content-render/scripts/render-content-markdown.ts +++ b/src/content-render/scripts/render-content-markdown.ts @@ -35,11 +35,12 @@ for (const page of pages) { fs.mkdirSync(`${contentCopilotDir}/${dirnames}`, { recursive: true }) // Context needed to render the content liquid const req = { language: 'en' } as ExtendedRequest - const contextualize = (req: ExtendedRequest): void => { - if (!req.context) return - if (!req.context.currentVersion) return - req.context.currentVersionObj = req.context.allVersions?.[req.context.currentVersion] - 
shortVersionsMiddleware(req, null, () => {}) + const contextualize = (request: ExtendedRequest): void => { + if (!request.context) return + if (!request.context.currentVersion) return + request.context.currentVersionObj = + request.context.allVersions?.[request.context.currentVersion] + shortVersionsMiddleware(request, null, () => {}) } req.context = { diff --git a/src/content-render/scripts/test-moved-content.ts b/src/content-render/scripts/test-moved-content.ts index 7b5dcac101..20983915c1 100644 --- a/src/content-render/scripts/test-moved-content.ts +++ b/src/content-render/scripts/test-moved-content.ts @@ -29,10 +29,11 @@ async function main(nameTuple: [string, string]) { if (data) assert(data.redirect_from.includes(oldHref), `Redirect from ${oldHref} not found`) { const parentIndexMd = path.join(path.dirname(after), 'index.md') - const fileContent = fs.readFileSync(parentIndexMd, 'utf-8') - const { data } = readFrontmatter(fileContent) + const parentFileContent = fs.readFileSync(parentIndexMd, 'utf-8') + const { data: parentData } = readFrontmatter(parentFileContent) const afterShortname = `/${after.split('/').slice(-1)[0].replace(/\.md$/, '')}` - if (data) assert(data.children.includes(afterShortname), `Child ${afterShortname} not found`) + if (parentData) + assert(parentData.children.includes(afterShortname), `Child ${afterShortname} not found`) } } else { const fileContent = fs.readFileSync(path.join(after, 'index.md'), 'utf-8') @@ -41,10 +42,11 @@ async function main(nameTuple: [string, string]) { if (data) assert(data.redirect_from.includes(oldHref), `Redirect from ${oldHref} not found`) { const parentIndexMd = path.join(path.dirname(after), 'index.md') - const fileContent = fs.readFileSync(parentIndexMd, 'utf-8') - const { data } = readFrontmatter(fileContent) + const parentFileContent = fs.readFileSync(parentIndexMd, 'utf-8') + const { data: parentData } = readFrontmatter(parentFileContent) const afterShortname = `/${after.split('/').slice(-1)}` - if (data) assert(data.children.includes(afterShortname), `Child ${afterShortname} not found`) + if (parentData) + assert(parentData.children.includes(afterShortname), `Child ${afterShortname} not found`) } } } diff --git a/src/content-render/scripts/update-filepaths.ts b/src/content-render/scripts/update-filepaths.ts index 9ee8a696b8..221a1522dc 100755 --- a/src/content-render/scripts/update-filepaths.ts +++ b/src/content-render/scripts/update-filepaths.ts @@ -96,7 +96,7 @@ async function main(): Promise { async function processFile( file: string, slugger: GithubSlugger, - options: ScriptOptions, + scriptOptions: ScriptOptions, ): Promise { const { data } = frontmatter(fs.readFileSync(file, 'utf8')) as unknown as { data: PageFrontmatter @@ -105,7 +105,7 @@ async function processFile( const isDirectory = isDirectoryCheck(file) // Assess the frontmatter and other conditions to determine if we want to process the path. 
- const processPage: boolean = determineProcessStatus(data, isDirectory, options) + const processPage: boolean = determineProcessStatus(data, isDirectory, scriptOptions) if (!processPage) return null let stringToSlugify: string = data.shortTitle || data.title @@ -153,10 +153,10 @@ async function processFile( return [contentPath, newContentPath] } -function moveFile(result: string[], options: ScriptOptions): void { +function moveFile(result: string[], scriptOptions: ScriptOptions): void { const [contentPath, newContentPath] = result - if (options.dryRun) { + if (scriptOptions.dryRun) { console.log('Move:\n', contentPath, '\nto:\n', newContentPath, '\n') return } @@ -214,7 +214,7 @@ function sortFiles(filesArray: string[]): string[] { }) } -function filterFiles(contentDir: string, options: ScriptOptions) { +function filterFiles(contentDir: string, scriptOptions: ScriptOptions) { return walkFiles(contentDir, ['.md']).filter((file: string) => { // Never move readmes if (file.endsWith('README.md')) return false @@ -226,9 +226,9 @@ function filterFiles(contentDir: string, options: ScriptOptions) { if (path.relative(contentDir, file).split(path.sep)[1] === 'index.md') return false // If no specific paths are passed, we are done filtering. - if (!options.paths) return true + if (!scriptOptions.paths) return true - return options.paths.some((p: string) => { + return scriptOptions.paths.some((p: string) => { // Allow either a full content path like "content/foo/bar.md" // or a top-level directory name like "copilot" if (!p.startsWith('content')) { @@ -247,15 +247,15 @@ function filterFiles(contentDir: string, options: ScriptOptions) { function determineProcessStatus( data: PageFrontmatter, isDirectory: boolean, - options: ScriptOptions, + scriptOptions: ScriptOptions, ): boolean { // Assess the conditions in this order: // If it's a directory AND we're excluding dirs, do not process it no matter what. - if (isDirectory && options.excludeDirs) { + if (isDirectory && scriptOptions.excludeDirs) { return false } // If the force option is passed, process it no matter what. - if (options.force) { + if (scriptOptions.force) { return true } // If the page has the override set, do not process it. diff --git a/src/content-render/tests/annotate.ts b/src/content-render/tests/annotate.ts index bd1b3be982..03a3f50b56 100644 --- a/src/content-render/tests/annotate.ts +++ b/src/content-render/tests/annotate.ts @@ -66,7 +66,7 @@ describe('annotate', () => { }) test('renders bash with hash bang annotations', async () => { - const example = ` + const bashExample = ` \`\`\`bash annotate # The next line is the hash bang #!/usr/bin/env bash @@ -75,11 +75,11 @@ describe('annotate', () => { echo "Hello, world!" \`\`\` `.trim() - const res = await renderContent(example) + const res = await renderContent(bashExample) const $ = cheerio.load(res) const headerCode = $('header pre').text() - expect(headerCode).toMatch(example.split('\n').slice(1, -1).join('\n')) + expect(headerCode).toMatch(bashExample.split('\n').slice(1, -1).join('\n')) const rows = $('.annotate-row') const notes = $('.annotate-note p', rows) const noteTexts = notes.map((i, el) => $(el).text()).get() @@ -90,7 +90,7 @@ echo "Hello, world!" 
}) test("doesn't complain if the first comment is empty", async () => { - const example = ` + const emptyCommentExample = ` \`\`\`yaml annotate copy # name: Create and publish a Docker image @@ -103,11 +103,11 @@ on: \`\`\` `.trim() - const res = await renderContent(example) + const res = await renderContent(emptyCommentExample) const $ = cheerio.load(res) const headerCode = $('header pre').text() - expect(headerCode).toMatch(example.split('\n').slice(1, -1).join('\n')) + expect(headerCode).toMatch(emptyCommentExample.split('\n').slice(1, -1).join('\n')) const rows = $('.annotate-row') const notes = $('.annotate-note p', rows) const noteTexts = notes.map((i, el) => $(el).text()).get() @@ -121,7 +121,7 @@ on: }) test('supports AUTOTITLE links in annotations', async () => { - const example = ` + const autotitleExample = ` \`\`\`yaml annotate copy # For more information about workflow syntax, see [AUTOTITLE](/get-started/start-your-journey/hello-world). name: Test workflow @@ -151,7 +151,7 @@ on: [push] // Mock test object doesn't need all Context properties, using 'as unknown as' to bypass strict type checking } as unknown as Context - const res = await renderContent(example, mockContext) + const res = await renderContent(autotitleExample, mockContext) const $ = cheerio.load(res) const rows = $('.annotate-row') diff --git a/src/content-render/unified/annotate.ts b/src/content-render/unified/annotate.ts index 02f6a9331d..bca73be290 100644 --- a/src/content-render/unified/annotate.ts +++ b/src/content-render/unified/annotate.ts @@ -122,8 +122,8 @@ function createAnnotatedNode(node: ElementNode, context: any): any { const rows = chunk(groups, 2) // Check the rows are formatted correctly - for (const [note, code] of rows) { - if (note === undefined || code === undefined) { + for (const [note, codeBlock] of rows) { + if (note === undefined || codeBlock === undefined) { throw new Error( "Each annotation must have a note and a code block. If you're trying to create a blank annotation, you can use a single line comment with a space after it.", ) @@ -231,13 +231,13 @@ function template({ h( 'div', { className: 'annotate-beside' }, - rows.map(([note, code]) => + rows.map(([note, codeBlock]) => h('div', { className: 'annotate-row' }, [ h( 'div', { className: 'annotate-code' }, // pre > code matches the mdast -> hast tree of a regular fenced code block. 
- h('pre', h('code', { className: `language-${lang}` }, code.join('\n'))), + h('pre', h('code', { className: `language-${lang}` }, codeBlock.join('\n'))), ), h( 'div', diff --git a/src/content-render/unified/code-header.ts b/src/content-render/unified/code-header.ts index 958c01b215..80c670dd31 100644 --- a/src/content-render/unified/code-header.ts +++ b/src/content-render/unified/code-header.ts @@ -108,8 +108,8 @@ function btnIcon(): Element { const btnIconHtml: string = octicons.copy.toSVG() const btnIconAst = parse(String(btnIconHtml), { sourceCodeLocationInfo: true }) // @ts-ignore - fromParse5 file option typing issue - const btnIcon = fromParse5(btnIconAst, { file: btnIconHtml }) - return btnIcon as Element + const btnIconElement = fromParse5(btnIconAst, { file: btnIconHtml }) + return btnIconElement as Element } // Using any due to conflicting unist/hast type definitions between dependencies diff --git a/src/content-render/unified/copilot-prompt.ts b/src/content-render/unified/copilot-prompt.ts index 1874b4aec9..5def4ee140 100644 --- a/src/content-render/unified/copilot-prompt.ts +++ b/src/content-render/unified/copilot-prompt.ts @@ -85,6 +85,6 @@ function findMatchingCode(ref: string, tree: any): any { function copilotIcon(): any { const copilotIconHtml = octicons.copilot.toSVG() const copilotIconAst = parse(String(copilotIconHtml), { sourceCodeLocationInfo: true }) - const copilotIcon = fromParse5(copilotIconAst, { file: copilotIconHtml }) - return copilotIcon + const copilotIconElement = fromParse5(copilotIconAst, { file: copilotIconHtml }) + return copilotIconElement } diff --git a/src/content-render/unified/rewrite-local-links.ts b/src/content-render/unified/rewrite-local-links.ts index c33e7e512b..53687ffdd3 100644 --- a/src/content-render/unified/rewrite-local-links.ts +++ b/src/content-render/unified/rewrite-local-links.ts @@ -185,14 +185,14 @@ function processLinkNode(node: Link, language: string, version: string, nodes: N language === 'en' ) { // Throw if the link text *almost* is AUTOTITLE - const textChild = child as Text + const childText = child as Text if ( - textChild.value.toUpperCase() === 'AUTOTITLE' || - distance(textChild.value.toUpperCase(), 'AUTOTITLE') <= 2 + childText.value.toUpperCase() === 'AUTOTITLE' || + distance(childText.value.toUpperCase(), 'AUTOTITLE') <= 2 ) { throw new Error( - `Found link text '${textChild.value}', expected 'AUTOTITLE'. ` + - `Find the mention of the link text '${textChild.value}' and change it to 'AUTOTITLE'. Case matters.`, + `Found link text '${childText.value}', expected 'AUTOTITLE'. ` + + `Find the mention of the link text '${childText.value}' and change it to 'AUTOTITLE'. Case matters.`, ) } } diff --git a/src/early-access/scripts/migrate-early-access-product.ts b/src/early-access/scripts/migrate-early-access-product.ts index a242feeab1..a2b253bcc9 100644 --- a/src/early-access/scripts/migrate-early-access-product.ts +++ b/src/early-access/scripts/migrate-early-access-product.ts @@ -177,23 +177,23 @@ function moveVariable(dataRef: string): void { const nonAltPath: string = newVariablePath.replace('-alt.yml', '.yml') const oldAltPath: string = oldVariablePath.replace('.yml', '-alt.yml') - let oldPath: string = oldVariablePath + let oldVariableFinalPath: string = oldVariablePath // If the old variable path doesn't exist, assume no migration needed. 
- if (!fs.existsSync(oldVariablePath)) { + if (!fs.existsSync(oldVariableFinalPath)) { if (!fs.existsSync(newVariablePath)) { console.log(`Problem migrating files for ${dataRef}`) return } if (fs.existsSync(oldAltPath)) { - oldPath = oldAltPath + oldVariableFinalPath = oldAltPath } else { return } } const variableFileContent: Record = yaml.load( - fs.readFileSync(oldPath, 'utf8'), + fs.readFileSync(oldVariableFinalPath, 'utf8'), ) as Record const value: any = variableFileContent[variableKey] diff --git a/src/events/components/Survey.tsx b/src/events/components/Survey.tsx index 9beac8fe77..447740d347 100644 --- a/src/events/components/Survey.tsx +++ b/src/events/components/Survey.tsx @@ -59,10 +59,10 @@ export const Survey = () => { } }, [state]) - function vote(vote: VoteState) { + function vote(userVote: VoteState) { return () => { - trackEvent(getEventData(vote === VoteState.YES)) - setVoteState(vote) + trackEvent(getEventData(userVote === VoteState.YES)) + setVoteState(userVote) } } @@ -93,9 +93,9 @@ export const Survey = () => { setComment('') } - function getEventData(vote: boolean): EventData { + function getEventData(voteValue: boolean): EventData { return { - vote, + vote: voteValue, comment, email, token, diff --git a/src/events/components/experiments/useShouldShowExperiment.ts b/src/events/components/experiments/useShouldShowExperiment.ts index 490b9133cf..28211883f6 100644 --- a/src/events/components/experiments/useShouldShowExperiment.ts +++ b/src/events/components/experiments/useShouldShowExperiment.ts @@ -46,13 +46,13 @@ export function useShouldShowExperiment(experimentKey: ExperimentNames | { key: useEffect(() => { const updateShouldShow = async () => { - const isStaff = await getIsStaff() + const staffStatus = await getIsStaff() setShowExperiment( shouldShowExperiment( experimentKey, router.locale || '', mainContext.currentVersion || '', - isStaff, + staffStatus, router.query, ), ) diff --git a/src/events/lib/analyze-comment.ts b/src/events/lib/analyze-comment.ts index d27de5ccee..931b017829 100644 --- a/src/events/lib/analyze-comment.ts +++ b/src/events/lib/analyze-comment.ts @@ -48,17 +48,20 @@ export const SIGNAL_RATINGS = [ { reduction: 0.2, name: 'not-language', - validator: (comment: string, language: string) => isNotLanguage(comment, language), + validator: (comment: string, commentLanguage: string) => + isNotLanguage(comment, commentLanguage), }, { reduction: 0.3, name: 'cuss-words-likely', - validator: (comment: string, language: string) => isLikelyCussWords(comment, language), + validator: (comment: string, commentLanguage: string) => + isLikelyCussWords(comment, commentLanguage), }, { reduction: 0.1, name: 'cuss-words-maybe', - validator: (comment: string, language: string) => isMaybeCussWords(comment, language), + validator: (comment: string, commentLanguage: string) => + isMaybeCussWords(comment, commentLanguage), }, { reduction: 0.2, @@ -91,11 +94,11 @@ export async function getGuessedLanguage(comment: string) { return bestGuess.alpha2 || undefined } -export async function analyzeComment(text: string, language = 'en') { +export async function analyzeComment(text: string, commentLanguage = 'en') { const signals = [] let rating = 1.0 for (const { reduction, name, validator } of SIGNAL_RATINGS) { - if (validator(text, language)) { + if (validator(text, commentLanguage)) { signals.push(name) rating -= reduction } diff --git a/src/events/tests/middleware-errors.ts b/src/events/tests/middleware-errors.ts index 8da3ed798b..2f139590d3 100644 --- 
a/src/events/tests/middleware-errors.ts +++ b/src/events/tests/middleware-errors.ts @@ -10,9 +10,9 @@ describe('formatErrors', () => { const { errors } = validateJson({ type: 'string' }, 0) const formattedErrors = formatErrors(errors || [], '') for (const formatted of formattedErrors) { - const { isValid, errors } = validateJson(schemas.validation, formatted) + const { isValid, errors: validationErrors } = validateJson(schemas.validation, formatted) if (!isValid) { - throw new Error(errors?.map((e) => e.message).join(' -- ')) + throw new Error(validationErrors?.map((e) => e.message).join(' -- ')) } } }) diff --git a/src/fixtures/helpers/turn-off-experiments.ts b/src/fixtures/helpers/turn-off-experiments.ts index 9767310f3e..cd4065eb79 100644 --- a/src/fixtures/helpers/turn-off-experiments.ts +++ b/src/fixtures/helpers/turn-off-experiments.ts @@ -27,11 +27,11 @@ async function alterExperimentsInPage( } for (const experiment of getActiveExperiments('all')) { await page.evaluate( - ({ experimentKey, variation }) => { + ({ experimentKey, variationType }) => { // @ts-expect-error overrideControlGroup is a custom function added to the window object - window.overrideControlGroup(experimentKey, variation) + window.overrideControlGroup(experimentKey, variationType) }, - { experimentKey: experiment.key, variation }, + { experimentKey: experiment.key, variationType: variation }, ) } } diff --git a/src/fixtures/tests/liquid.ts b/src/fixtures/tests/liquid.ts index 21208f1e75..545bdfc879 100644 --- a/src/fixtures/tests/liquid.ts +++ b/src/fixtures/tests/liquid.ts @@ -78,13 +78,13 @@ describe('post', () => { // Test what happens to `Cram{% ifversion fpt %}FPT{% endif %}ped.` // when it's not free-pro-team. { - const $: cheerio.Root = await getDOM( + const $inner: cheerio.Root = await getDOM( '/enterprise-server@latest/get-started/liquid/whitespace', ) - const html = $('#article-contents').html() + const innerHtml = $inner('#article-contents').html() // Assures that there's not whitespace left when the `{% ifversion %}` // yields an empty string. - expect(html).toMatch('Cramped') + expect(innerHtml).toMatch('Cramped') } }) }) diff --git a/src/frame/components/UtmPreserver.tsx b/src/frame/components/UtmPreserver.tsx index 987df9fd74..6e829c7068 100644 --- a/src/frame/components/UtmPreserver.tsx +++ b/src/frame/components/UtmPreserver.tsx @@ -36,11 +36,11 @@ export const UtmPreserver = () => { } // Add UTM parameters to a URL - const addUtmParamsToUrl = (url: string, utmParams: URLSearchParams): string => { + const addUtmParamsToUrl = (url: string, params: URLSearchParams): string => { try { const urlObj = new URL(url) - for (const [key, value] of utmParams) { + for (const [key, value] of params) { urlObj.searchParams.set(key, value) } diff --git a/src/frame/lib/create-tree.ts b/src/frame/lib/create-tree.ts index 8f47312a9d..bd0e7dff6e 100644 --- a/src/frame/lib/create-tree.ts +++ b/src/frame/lib/create-tree.ts @@ -34,9 +34,9 @@ export default async function createTree( // wrong. try { mtime = await getMtime(filepath) - } catch (error) { - if ((error as NodeJS.ErrnoException).code !== 'ENOENT') { - throw error + } catch (innerError) { + if ((innerError as NodeJS.ErrnoException).code !== 'ENOENT') { + throw innerError } // Throw an error if we can't find a content file associated with the children: entry. // But don't throw an error if the user is running the site locally and hasn't cloned the Early Access repo. 
diff --git a/src/frame/lib/page-data.ts b/src/frame/lib/page-data.ts index 560fb57ac9..e301bd223e 100644 --- a/src/frame/lib/page-data.ts +++ b/src/frame/lib/page-data.ts @@ -391,11 +391,11 @@ export const loadPages = loadPageList // Create an object from the list of all pages with permalinks as keys for fast lookup. export function createMapFromArray(pageList: Page[]): Record { const pageMap = pageList.reduce( - (pageMap: Record, page: Page) => { + (accumulatedMap: Record, page: Page) => { for (const permalink of page.permalinks) { - pageMap[permalink.href] = page + accumulatedMap[permalink.href] = page } - return pageMap + return accumulatedMap }, {} as Record, ) diff --git a/src/frame/lib/read-json-file.ts b/src/frame/lib/read-json-file.ts index 44b6758fd2..1c8ebe6d1f 100644 --- a/src/frame/lib/read-json-file.ts +++ b/src/frame/lib/read-json-file.ts @@ -58,12 +58,12 @@ export function readCompressedJsonFileFallbackLazily(xpath: string): () => any { if (err.code === 'ENOENT') { try { fs.accessSync(`${xpath}.br`) - } catch (err: any) { - // err is any because fs errors can have various shapes with code property - if (err.code === 'ENOENT') { + } catch (innerErr: any) { + // innerErr is any because fs errors can have various shapes with code property + if (innerErr.code === 'ENOENT') { throw new Error(`Neither ${xpath} nor ${xpath}.br is accessible`) } - throw err + throw innerErr } } else { throw err diff --git a/src/frame/middleware/app-router-gateway.ts b/src/frame/middleware/app-router-gateway.ts index 1c2318277b..b84e4f3f5b 100644 --- a/src/frame/middleware/app-router-gateway.ts +++ b/src/frame/middleware/app-router-gateway.ts @@ -68,10 +68,10 @@ export default function appRouterGateway(req: ExtendedRequest, res: Response, ne if (shouldUseAppRouter(path, pageFound)) { console.log(`[INFO] Using App Router for path: ${path} (pageFound: ${!!pageFound})`) - const strippedPath = stripLocalePrefix(path) + const innerStrippedPath = stripLocalePrefix(path) // For 404 routes, always route to our 404 page - if (strippedPath === '/404' || strippedPath === '/_not-found' || !pageFound) { + if (innerStrippedPath === '/404' || innerStrippedPath === '/_not-found' || !pageFound) { req.url = '/404' res.status(404) defaultCacheControl(res) diff --git a/src/frame/middleware/context/breadcrumbs.ts b/src/frame/middleware/context/breadcrumbs.ts index 166d125fd3..5a4659588f 100644 --- a/src/frame/middleware/context/breadcrumbs.ts +++ b/src/frame/middleware/context/breadcrumbs.ts @@ -44,13 +44,13 @@ function getBreadcrumbs(req: ExtendedRequest, isEarlyAccess: boolean) { } } - const breadcrumbs = traverseTreeTitles( + const breadcrumbsResult = traverseTreeTitles( req.context.currentPath, req.context.currentProductTreeTitles, ) - ;[...Array(cutoff)].forEach(() => breadcrumbs.shift()) + ;[...Array(cutoff)].forEach(() => breadcrumbsResult.shift()) - return breadcrumbs + return breadcrumbsResult } // Return an array as if you'd traverse down a tree. Imagine a tree like diff --git a/src/frame/middleware/context/context.ts b/src/frame/middleware/context/context.ts index ef4caa955a..8fa16fc2e2 100644 --- a/src/frame/middleware/context/context.ts +++ b/src/frame/middleware/context/context.ts @@ -91,20 +91,20 @@ export default async function contextualize( // The reason this is a function is because most of the time, we don't // need to know the English equivalent. 
It only comes into play if a // translated - req.context.getEnglishPage = (context) => { - if (!context.enPage) { - const { page } = context + req.context.getEnglishPage = (ctx) => { + if (!ctx.enPage) { + const { page } = ctx if (!page) { throw new Error("The 'page' has not been put into the context yet.") } - const enPath = context.currentPath!.replace(`/${page.languageCode}`, '/en') - const enPage = context.pages![enPath] + const enPath = ctx.currentPath!.replace(`/${page.languageCode}`, '/en') + const enPage = ctx.pages![enPath] if (!enPage) { throw new Error(`Unable to find equivalent English page by the path '${enPath}'`) } - context.enPage = enPage + ctx.enPage = enPage } - return context.enPage + return ctx.enPage } } diff --git a/src/frame/middleware/context/glossaries.ts b/src/frame/middleware/context/glossaries.ts index 047bf246e6..d76d34d4a6 100644 --- a/src/frame/middleware/context/glossaries.ts +++ b/src/frame/middleware/context/glossaries.ts @@ -41,7 +41,7 @@ export default async function glossaries(req: ExtendedRequest, res: Response, ne 'glossaries.external', req.context.currentLanguage!, ) - const glossaries = ( + const glossariesList = ( await Promise.all( glossariesRaw.map(async (glossary) => { let { description } = glossary @@ -80,7 +80,7 @@ export default async function glossaries(req: ExtendedRequest, res: Response, ne ) ).filter(Boolean) - req.context.glossaries = glossaries.sort((a, b) => + req.context.glossaries = glossariesList.sort((a, b) => a.term.localeCompare(b.term, req.context!.currentLanguage), ) diff --git a/src/frame/middleware/index.ts b/src/frame/middleware/index.ts index 929d12a099..eb69e7acff 100644 --- a/src/frame/middleware/index.ts +++ b/src/frame/middleware/index.ts @@ -82,11 +82,11 @@ const asyncMiddleware = ( fn: (req: TReq, res: Response, next: NextFunction) => T | Promise, ) => - async (req: Request, res: Response, next: NextFunction) => { + async (req: Request, res: Response, nextFn: NextFunction) => { try { - await fn(req as TReq, res, next) + await fn(req as TReq, res, nextFn) } catch (error) { - next(error) + nextFn(error) } } diff --git a/src/frame/middleware/next.ts b/src/frame/middleware/next.ts index 5ee2f41f93..ebe123a365 100644 --- a/src/frame/middleware/next.ts +++ b/src/frame/middleware/next.ts @@ -11,7 +11,7 @@ export const nextApp = next({ dev: isDevelopment }) export const nextHandleRequest = nextApp.getRequestHandler() await nextApp.prepare() -function renderPageWithNext(req: ExtendedRequest, res: Response, next: NextFunction) { +function renderPageWithNext(req: ExtendedRequest, res: Response, nextFn: NextFunction) { if (req.path.startsWith('/_next') && !req.path.startsWith('/_next/data')) { return nextHandleRequest(req, res) } @@ -20,7 +20,7 @@ function renderPageWithNext(req: ExtendedRequest, res: Response, next: NextFunct // '/_next/static/webpack/64e44ef62e261d3a.webpack.hot-update.json' has to // go through here. 
- return next() + return nextFn() } export default renderPageWithNext diff --git a/src/frame/tests/manifest.ts b/src/frame/tests/manifest.ts index aff2fd9f79..7a2d27d794 100644 --- a/src/frame/tests/manifest.ts +++ b/src/frame/tests/manifest.ts @@ -42,11 +42,11 @@ describe('manifest', () => { expect(manifest.icons.length).toBeGreaterThan(0) await Promise.all( manifest.icons.map(async (icon) => { - const res = await get(icon.src, { responseType: 'buffer' }) - expect(res.statusCode).toBe(200) - expect(res.headers['content-type']).toBe(icon.type) + const iconRes = await get(icon.src, { responseType: 'buffer' }) + expect(iconRes.statusCode).toBe(200) + expect(iconRes.headers['content-type']).toBe(icon.type) // The `sizes` should match the payload - const image = sharp(res.body) + const image = sharp(iconRes.body) const [width, height] = icon.sizes.split('x').map((s) => parseInt(s)) const dimensions = await image.metadata() expect(dimensions.width).toBe(width) diff --git a/src/frame/tests/pages.ts b/src/frame/tests/pages.ts index 99d000e884..99bddf0542 100644 --- a/src/frame/tests/pages.ts +++ b/src/frame/tests/pages.ts @@ -75,7 +75,7 @@ describe('pages module', () => { // Only consider as duplicate if more than one unique file defines the same redirect const duplicates = Array.from(redirectToFiles.entries()) .filter(([, files]) => files.size > 1) - .map(([path]) => path) + .map(([redirectPath]) => redirectPath) // Build a detailed message with sources for each duplicate const message = `Found ${duplicates.length} duplicate redirect_from path${duplicates.length === 1 ? '' : 's'}. diff --git a/src/github-apps/scripts/sync.ts b/src/github-apps/scripts/sync.ts index 01a6cfa914..47ceb5a6d7 100755 --- a/src/github-apps/scripts/sync.ts +++ b/src/github-apps/scripts/sync.ts @@ -126,7 +126,7 @@ export async function syncGitHubAppsData( const { progAccessData, progActorResources } = await getProgAccessData(progAccessSource) for (const schemaName of sourceSchemas) { - const data = JSON.parse( + const schemaData = JSON.parse( await readFile(path.join(openApiSource, schemaName), 'utf8'), ) as OpenApiData const appsDataConfig = JSON.parse(await readFile(CONFIG_FILE, 'utf8')) as AppsDataConfig @@ -138,7 +138,7 @@ export async function syncGitHubAppsData( } // Because the information used on the apps page doesn't require any // rendered content we can parse the dereferenced files directly - for (const [requestPath, operationsAtPath] of Object.entries(data.paths)) { + for (const [requestPath, operationsAtPath] of Object.entries(schemaData.paths)) { for (const [verb, operation] of Object.entries(operationsAtPath)) { // We only want to process operations that have programmatic access data if (!progAccessData[operation.operationId]) continue @@ -491,17 +491,17 @@ export function shouldFilterMetadataPermission( export function isActorExcluded( excludedActors: string[] | undefined | null | unknown, actorType: string, - actorTypeMap: Record = {}, + actorMapping: Record = {}, ): boolean { if (!excludedActors || !Array.isArray(excludedActors)) { return false } // Map generic actor type to actual YAML value if mapping exists - const actualActorType = actorTypeMap[actorType] || actorType + const mappedActorType = actorMapping[actorType] || actorType // Check if the mapped actor type is excluded - if (excludedActors.includes(actualActorType)) { + if (excludedActors.includes(mappedActorType)) { return true } @@ -571,7 +571,7 @@ async function getProgActorResourceContent({ owner, repo, branch, - path, + path: resourcePath, 
gitHubSourceDirectory = null, }: ProgActorResourceContentOptions): Promise { // Get files either locally from disk or from the GitHub remote repo @@ -579,7 +579,7 @@ async function getProgActorResourceContent({ if (gitHubSourceDirectory) { files = await getProgActorContentFromDisk(gitHubSourceDirectory) } else { - files = await getDirectoryContents(owner!, repo!, branch!, path!) + files = await getDirectoryContents(owner!, repo!, branch!, resourcePath!) } // We need to format the file content into a single object. Each file diff --git a/src/graphql/components/Changelog.tsx b/src/graphql/components/Changelog.tsx index 64550eee20..9976d8e095 100644 --- a/src/graphql/components/Changelog.tsx +++ b/src/graphql/components/Changelog.tsx @@ -20,8 +20,8 @@ export function Changelog({ changelogItems }: Props) {

              {change.title}

-               {change.changes.map((change) => (
-                 •
+               {change.changes.map((changeItem) => (
+                 •
                ))}
@@ -30,8 +30,8 @@ export function Changelog({ changelogItems }: Props) {

              {change.title}

-               {change.changes.map((change) => (
-                 •
+               {change.changes.map((changeItem) => (
+                 •
                ))}
@@ -39,8 +39,8 @@ export function Changelog({ changelogItems }: Props) {
        {(item.upcomingChanges || []).map((change, index) => (

              {change.title}

-               {change.changes.map((change) => (
-                 •
+               {change.changes.map((changeItem) => (
  • ))} ))} diff --git a/src/graphql/scripts/utils/process-schemas.ts b/src/graphql/scripts/utils/process-schemas.ts index b2a2c95edd..fe8aa867e3 100755 --- a/src/graphql/scripts/utils/process-schemas.ts +++ b/src/graphql/scripts/utils/process-schemas.ts @@ -389,10 +389,10 @@ export default async function processSchemas( } await Promise.all( - mutationReturnFields.fields!.map(async (field: FieldDefinitionNode) => { + mutationReturnFields.fields!.map(async (returnFieldDef: FieldDefinitionNode) => { const returnField: Partial = {} - returnField.name = field.name.value - const fieldType = helpers.getType(field) + returnField.name = returnFieldDef.name.value + const fieldType = helpers.getType(returnFieldDef) if (!fieldType) return returnField.type = fieldType returnField.id = helpers.getId(returnField.type) diff --git a/src/graphql/scripts/utils/schema-helpers.ts b/src/graphql/scripts/utils/schema-helpers.ts index 0bd516f1e2..846e9c6230 100644 --- a/src/graphql/scripts/utils/schema-helpers.ts +++ b/src/graphql/scripts/utils/schema-helpers.ts @@ -128,8 +128,8 @@ function getFullLink(baseType: string, id: string): string { return `/graphql/reference/${baseType}#${id}` } -function getId(path: string): string { - return removeMarkers(path).toLowerCase() +function getId(typeName: string): string { + return removeMarkers(typeName).toLowerCase() } // e.g., given `ObjectTypeDefinition`, get `objects` diff --git a/src/landings/components/ProductSelectionCard.tsx b/src/landings/components/ProductSelectionCard.tsx index 415d217cc9..8db2f8d4eb 100644 --- a/src/landings/components/ProductSelectionCard.tsx +++ b/src/landings/components/ProductSelectionCard.tsx @@ -45,18 +45,18 @@ export const ProductSelectionCard = ({ group }: ProductSelectionCardProps) => { height: '22px', } - function icon(group: ProductGroupT) { - if (group.icon) { + function icon(productGroup: ProductGroupT) { + if (productGroup.icon) { return (
-          {group.name}
+          {productGroup.name}
    ) - } else if (group.octicon) { - const octicon: React.FunctionComponent = octiconMap[group.octicon] + } else if (productGroup.octicon) { + const octicon: React.FunctionComponent = octiconMap[productGroup.octicon] if (!octicon) { - throw new Error(`Octicon ${group.octicon} not found`) + throw new Error(`Octicon ${productGroup.octicon} not found`) } return ( diff --git a/src/languages/lib/languages-server.ts b/src/languages/lib/languages-server.ts index 97e41f2ff2..61dff66d6b 100644 --- a/src/languages/lib/languages-server.ts +++ b/src/languages/lib/languages-server.ts @@ -90,8 +90,8 @@ export const languagePrefixPathRegex: RegExp = new RegExp(`^/(${languageKeys.joi * if it's something like /foo or /foo/bar or /fr (because French (fr) * is currently not an active language) */ -export function pathLanguagePrefixed(path: string): boolean { - return languagePrefixPathRegex.test(path) +export function pathLanguagePrefixed(urlPath: string): boolean { + return languagePrefixPathRegex.test(urlPath) } export default languages diff --git a/src/languages/scripts/purge-fastly-edge-cache-per-language.ts b/src/languages/scripts/purge-fastly-edge-cache-per-language.ts index bb4f2fa175..df987328d0 100644 --- a/src/languages/scripts/purge-fastly-edge-cache-per-language.ts +++ b/src/languages/scripts/purge-fastly-edge-cache-per-language.ts @@ -41,14 +41,14 @@ for (const language of languages) { } function languagesFromString(str: string): string[] { - const languages = str + const parsedLanguages = str .split(/,/) .map((x) => x.trim()) .filter(Boolean) - if (!languages.every((lang) => languageKeys.includes(lang))) { + if (!parsedLanguages.every((lang) => languageKeys.includes(lang))) { throw new Error( - `Unrecognized language code (${languages.find((lang) => !languageKeys.includes(lang))})`, + `Unrecognized language code (${parsedLanguages.find((lang) => !languageKeys.includes(lang))})`, ) } - return languages + return parsedLanguages } diff --git a/src/languages/tests/frame.ts b/src/languages/tests/frame.ts index 55122e69d7..fca75c4a18 100644 --- a/src/languages/tests/frame.ts +++ b/src/languages/tests/frame.ts @@ -96,7 +96,7 @@ describe('release notes', () => { // // This is useful because if we test every single individual version of // every plan the test just takes way too long. 
- const getReleaseNotesVersionCombinations = (langs: string[]) => { + const getReleaseNotesVersionCombinations = (languages: string[]) => { const combinations = [] const prefixes: string[] = [] for (const version of page!.applicableVersions) { @@ -105,7 +105,7 @@ describe('release notes', () => { continue } prefixes.push(prefix) - combinations.push(...langs.map((lang) => [lang, version])) + combinations.push(...languages.map((lang) => [lang, version])) } return combinations } diff --git a/src/languages/tests/translation-error-comments.ts b/src/languages/tests/translation-error-comments.ts index 26c163cb8c..19bf22a914 100644 --- a/src/languages/tests/translation-error-comments.ts +++ b/src/languages/tests/translation-error-comments.ts @@ -320,8 +320,8 @@ describe('Translation Error Comments', () => { } // Mock renderContent to simulate error for Japanese, success for English - mockRenderContent.mockImplementation((template: string, context: any) => { - if (context.currentLanguage !== 'en' && template.includes('badtag')) { + mockRenderContent.mockImplementation((template: string, innerContext: any) => { + if (innerContext.currentLanguage !== 'en' && template.includes('badtag')) { const error = new Error("Unknown tag 'badtag'") error.name = 'ParseError' ;(error as any).token = { @@ -330,7 +330,7 @@ describe('Translation Error Comments', () => { } throw error } - return context.currentLanguage === 'en' ? 'English Title' : template + return innerContext.currentLanguage === 'en' ? 'English Title' : template }) const result = await renderContentWithFallback(mockPage, 'rawTitle', context) @@ -357,8 +357,8 @@ describe('Translation Error Comments', () => { }, } - mockRenderContent.mockImplementation((template: string, context: any) => { - if (context.currentLanguage !== 'en' && template.includes('badtag')) { + mockRenderContent.mockImplementation((template: string, innerContext: any) => { + if (innerContext.currentLanguage !== 'en' && template.includes('badtag')) { const error = new Error("Unknown tag 'badtag'") error.name = 'ParseError' throw error diff --git a/src/learning-track/tests/lint-data.ts b/src/learning-track/tests/lint-data.ts index b7ef60d3e9..ef4f2a9ba1 100644 --- a/src/learning-track/tests/lint-data.ts +++ b/src/learning-track/tests/lint-data.ts @@ -57,10 +57,10 @@ describe('learning tracks', () => { let fixables = 0 for (const [key, guides] of troubles) { errorMessage += `Under "${key}"...\n` - for (const { uri, index, redirects } of guides) { - if (redirects) { + for (const { uri, index, redirects: redirectTo } of guides) { + if (redirectTo) { fixables += 1 - errorMessage += ` guide: #${index + 1} ${uri} redirects to ${redirects}\n` + errorMessage += ` guide: #${index + 1} ${uri} redirects to ${redirectTo}\n` } else { errorMessage += ` guide: #${index + 1} ${uri} is broken.\n` } diff --git a/src/links/components/LinkPreviewPopover.tsx b/src/links/components/LinkPreviewPopover.tsx index 75f26b466a..47b5e2d32b 100644 --- a/src/links/components/LinkPreviewPopover.tsx +++ b/src/links/components/LinkPreviewPopover.tsx @@ -284,8 +284,8 @@ function fillPopover( const regex = /^\/(?\w{2}\/)?(?[\w-]+@[\w-.]+\/)?(?[\w-]+\/)?/ const match = regex.exec(linkURL.pathname) if (match?.groups) { - const { lang, version, product } = match.groups - const productURL = [lang, version, product].map((n) => n || '').join('') + const { lang, version, product: productPath } = match.groups + const productURL = [lang, version, productPath].map((n) => n || '').join('') productHeadLink.href = 
`${linkURL.origin}/${productURL}` } productHead.style.display = 'block' diff --git a/src/links/scripts/rendered-content-link-checker.ts b/src/links/scripts/rendered-content-link-checker.ts index e7b12b1af5..bea6ed6b25 100755 --- a/src/links/scripts/rendered-content-link-checker.ts +++ b/src/links/scripts/rendered-content-link-checker.ts @@ -483,8 +483,8 @@ async function commentOnPR(core: CoreInject, octokit: Octokit, flaws: LinkFlaw[] issue_number: pullNumber, }) let previousCommentId - for (const { body, id } of data) { - if (body && body.includes(findAgainSymbol)) { + for (const { body: commentBody, id } of data) { + if (commentBody && commentBody.includes(findAgainSymbol)) { previousCommentId = id } } diff --git a/src/links/scripts/validate-github-github-docs-urls/generate-new-json.ts b/src/links/scripts/validate-github-github-docs-urls/generate-new-json.ts index 20ad6e3ab5..c30ebaef2e 100644 --- a/src/links/scripts/validate-github-github-docs-urls/generate-new-json.ts +++ b/src/links/scripts/validate-github-github-docs-urls/generate-new-json.ts @@ -20,7 +20,7 @@ export function generateNewJSON( let countChanges = 0 for (const [identifier, url] of Object.entries(destination)) { - const check = checks.find((check) => check.identifier === identifier) + const check = checks.find((foundCheck) => foundCheck.identifier === identifier) if (check) { // At the moment, the only possible correction is if the URL is // found but required a redirect. diff --git a/src/metrics/scripts/docstat.ts b/src/metrics/scripts/docstat.ts index 72ca73dcc5..2e291ccc4a 100644 --- a/src/metrics/scripts/docstat.ts +++ b/src/metrics/scripts/docstat.ts @@ -412,8 +412,8 @@ try { // Given input: https://docs.github.com/en/copilot/managing-copilot/ // Use: copilot/managing-copilot -function getCleanPath(providedPath: string): string { - let clean = providedPath +function getCleanPath(inputPath: string): string { + let clean = inputPath const cleanArr = clean.split('?') // remove query params if (cleanArr.length > 1) cleanArr.pop() clean = cleanArr.join('/') @@ -431,29 +431,29 @@ function getCleanPath(providedPath: string): string { return clean } -function getVersion(cleanPath: string): string { - const pathParts = cleanPath.split('/') - const version = ENTERPRISE_REGEX.test(pathParts[0]) ? pathParts[0] : FREE_PRO_TEAM - return version +function getVersion(pathToCheck: string): string { + const pathParts = pathToCheck.split('/') + const versionString = ENTERPRISE_REGEX.test(pathParts[0]) ? pathParts[0] : FREE_PRO_TEAM + return versionString } -function removeVersionSegment(cleanPath: string, version: string): string { - if (version === FREE_PRO_TEAM) return cleanPath - const pathParts = cleanPath.split('/') +function removeVersionSegment(pathToProcess: string, versionString: string): string { + if (versionString === FREE_PRO_TEAM) return pathToProcess + const pathParts = pathToProcess.split('/') pathParts.shift() if (!pathParts.length) return 'index' return pathParts.join('/') } // Try to find the path in the list of valid pages at https://docs.github.com/api/pagelist/en -async function validatePath(cleanPath: string, version: string): Promise { +async function validatePath(pathToValidate: string, versionToValidate: string): Promise { // Only Kusto uses 'index' for the homepage; the Docs API uses '/en' - const basePath = cleanPath === 'index' ? '' : cleanPath + const basePath = pathToValidate === 'index' ? '' : pathToValidate const pathToCheck = - version === FREE_PRO_TEAM + versionToValidate === FREE_PRO_TEAM ? 
path.join('/', 'en', basePath) - : path.join('/', 'en', version, basePath) + : path.join('/', 'en', versionToValidate, basePath) let data: string try { diff --git a/src/observability/middleware/handle-errors.ts b/src/observability/middleware/handle-errors.ts index dd637d91a1..4b1dcda563 100644 --- a/src/observability/middleware/handle-errors.ts +++ b/src/observability/middleware/handle-errors.ts @@ -141,9 +141,9 @@ async function handleError( // Report to Failbot AFTER responding to the user await logException(error, req) } - } catch (error) { - console.error('An error occurred in the error handling middleware!', error) - next(error) + } catch (handlingError) { + console.error('An error occurred in the error handling middleware!', handlingError) + next(handlingError) return } } diff --git a/src/observability/tests/get-automatic-request-logger.ts b/src/observability/tests/get-automatic-request-logger.ts index 1f66fe812f..9b05c169ef 100644 --- a/src/observability/tests/get-automatic-request-logger.ts +++ b/src/observability/tests/get-automatic-request-logger.ts @@ -116,7 +116,7 @@ describe('getAutomaticRequestLogger', () => { // Create a completely isolated test environment for each iteration const isolatedLogs: string[] = [] - const originalConsoleLog = console.log + const savedConsoleLog = console.log // Replace console.log with isolated capture console.log = vi.fn((message: string) => { @@ -174,7 +174,7 @@ describe('getAutomaticRequestLogger', () => { expect(isolatedLogs[0]).toContain(testCase.expectedInLog) } finally { // Always restore console.log - console.log = originalConsoleLog + console.log = savedConsoleLog } } }) @@ -281,7 +281,7 @@ describe('getAutomaticRequestLogger', () => { // Create isolated log capture for this specific test const isolatedLogs: string[] = [] - const originalConsoleLog = console.log + const savedConsoleLog = console.log console.log = vi.fn((message: string) => { isolatedLogs.push(message) @@ -299,7 +299,7 @@ describe('getAutomaticRequestLogger', () => { expect(isolatedLogs).toHaveLength(0) } finally { // Always restore console.log - console.log = originalConsoleLog + console.log = savedConsoleLog } }) diff --git a/src/redirects/lib/get-redirect.ts b/src/redirects/lib/get-redirect.ts index 99b1d75564..ff5c896400 100644 --- a/src/redirects/lib/get-redirect.ts +++ b/src/redirects/lib/get-redirect.ts @@ -312,14 +312,14 @@ function tryReplacements(prefix: string, suffix: string, context: Context): stri return undefined } - const test = (suffix: string): boolean => { + const test = (testSuffix: string): boolean => { // This is a generally broad search and replace and this particular // replacement has never been present in api documentation only enterprise // admin documentation, so we're excluding the REST api pages - if (suffix.includes('/rest')) { + if (testSuffix.includes('/rest')) { return false } - const candidateAsRedirect = prefix + suffix + const candidateAsRedirect = prefix + testSuffix const candidateAsURL = `/en${candidateAsRedirect}` return candidateAsRedirect in redirects || candidateAsURL in pages } diff --git a/src/release-notes/middleware/ghes-release-notes.ts b/src/release-notes/middleware/ghes-release-notes.ts index 0e3540ffa0..cebe47ab46 100644 --- a/src/release-notes/middleware/ghes-release-notes.ts +++ b/src/release-notes/middleware/ghes-release-notes.ts @@ -71,12 +71,10 @@ export default async function ghesReleaseNotesContext( // notes instead. 
enContext.ghesReleases = formatReleases(ghesReleaseNotes) - const matchedReleaseNotes = enContext.ghesReleases!.find( - (r) => r.version === requestedRelease, - ) - if (!matchedReleaseNotes) throw new Error('Release notes not found') - const currentReleaseNotes = matchedReleaseNotes.patches - return renderPatchNotes(currentReleaseNotes, enContext) + const enMatchedNotes = enContext.ghesReleases!.find((r) => r.version === requestedRelease) + if (!enMatchedNotes) throw new Error('Release notes not found') + const enCurrentNotes = enMatchedNotes.patches + return renderPatchNotes(enCurrentNotes, enContext) }, ) } finally { diff --git a/src/rest/lib/index.ts b/src/rest/lib/index.ts index 9702abf218..49a191a30b 100644 --- a/src/rest/lib/index.ts +++ b/src/rest/lib/index.ts @@ -120,7 +120,7 @@ export async function getRestMiniTocItems( category: string, subCategory: string, apiVersion: string | undefined, - restOperations: Operation[], + operations: Operation[], language: string, version: string, context: Context, @@ -148,7 +148,7 @@ export async function getRestMiniTocItems( const categoryData = apiData.get(category)! if (!categoryData.get(subCategory)) { - const titles = restOperations.map((operation: Operation) => operation.title) + const titles = operations.map((operation: Operation) => operation.title) const restOperationsMiniTocItems = await getAutomatedPageMiniTocItems(titles, context, 3) categoryData.set(subCategory, { restOperationsMiniTocItems, diff --git a/src/rest/scripts/test-open-api-schema.ts b/src/rest/scripts/test-open-api-schema.ts index bf006936a5..4efddd638f 100755 --- a/src/rest/scripts/test-open-api-schema.ts +++ b/src/rest/scripts/test-open-api-schema.ts @@ -163,5 +163,5 @@ function difference(obj1: Record, obj2: Record !file.includes('index.md')) - .filter((file) => !nonAutomatedRestPaths.some((path) => file.includes(path))) + .filter((file) => !nonAutomatedRestPaths.some((excludePath) => file.includes(excludePath))) } diff --git a/src/rest/scripts/utils/sync.ts b/src/rest/scripts/utils/sync.ts index b245f127d0..3215caae5e 100644 --- a/src/rest/scripts/utils/sync.ts +++ b/src/rest/scripts/utils/sync.ts @@ -151,23 +151,21 @@ export async function getOpenApiSchemaFiles( // bundling the OpenAPI in github/github const schemaNames = schemas.map((schema) => path.basename(schema, '.json')) - const OPENAPI_VERSION_NAMES = Object.keys(allVersions).map( - (elem) => allVersions[elem].openApiVersionName, - ) + const versionNames = Object.keys(allVersions).map((elem) => allVersions[elem].openApiVersionName) for (const schema of schemaNames) { const schemaBasename = `${schema}.json` // If the version doesn't have calendar date versioning // it should have an exact match with one of the versions defined // in the allVersions object. - if (OPENAPI_VERSION_NAMES.includes(schema)) { + if (versionNames.includes(schema)) { webhookSchemas.push(schemaBasename) } // If the schema version has calendar date versioning, then one of // the versions defined in allVersions should be a substring of the // schema version. This means the schema version is a supported version - if (OPENAPI_VERSION_NAMES.some((elem) => schema.startsWith(elem))) { + if (versionNames.some((elem) => schema.startsWith(elem))) { // If the schema being evaluated is a calendar-date version, then // there would only be one exact match in the list of schema names. 
// If the schema being evaluated is a non-calendar-date version, then diff --git a/src/rest/tests/openapi-schema.ts b/src/rest/tests/openapi-schema.ts index 810489d5be..f479a0ae65 100644 --- a/src/rest/tests/openapi-schema.ts +++ b/src/rest/tests/openapi-schema.ts @@ -190,10 +190,12 @@ describe('OpenAPI schema validation', () => { }) }) -async function findOperation(version: string, method: string, path: string) { +async function findOperation(version: string, method: string, requestPath: string) { const allOperations = await getFlatListOfOperations(version) return allOperations.find((operation) => { - return operation.requestPath === path && operation.verb.toLowerCase() === method.toLowerCase() + return ( + operation.requestPath === requestPath && operation.verb.toLowerCase() === method.toLowerCase() + ) }) } diff --git a/src/rest/tests/rendering.ts b/src/rest/tests/rendering.ts index 645401bf00..ce2718f98c 100644 --- a/src/rest/tests/rendering.ts +++ b/src/rest/tests/rendering.ts @@ -23,7 +23,7 @@ describe('REST references docs', () => { .map((i, h2) => $(h2).attr('id')) .get() const schemaSlugs = checksRestOperations.map((operation) => slug(operation.title)) - expect(schemaSlugs.every((slug) => domH2Ids.includes(slug))).toBe(true) + expect(schemaSlugs.every((operationSlug) => domH2Ids.includes(operationSlug))).toBe(true) } }) diff --git a/src/search/components/input/SearchOverlay.tsx b/src/search/components/input/SearchOverlay.tsx index bf9d40ea8e..1b2894f77c 100644 --- a/src/search/components/input/SearchOverlay.tsx +++ b/src/search/components/input/SearchOverlay.tsx @@ -187,47 +187,45 @@ export function SearchOverlay({ // Combine options for key navigation const [combinedOptions, generalOptionsWithViewStatus, aiOptionsWithUserInput] = useMemo(() => { setAnnouncement('') - let generalOptionsWithViewStatus = [...generalSearchResults] - const aiOptionsWithUserInput = [...userInputOptions, ...filteredAIOptions] - const combinedOptions = [] as Array<{ + let generalWithView = [...generalSearchResults] + const aiWithUser = [...userInputOptions, ...filteredAIOptions] + const combined = [] as Array<{ group: 'general' | 'ai' | string url?: string option: AutocompleteSearchHitWithUserQuery | GeneralSearchHitWithOptions }> if (generalSearchResults.length > 0) { - generalOptionsWithViewStatus.push({ + generalWithView.push({ title: t('search.overlay.view_all_search_results'), isViewAllResults: true, } as any) } else if (autoCompleteSearchError) { if (urlSearchInputQuery.trim() !== '') { - generalOptionsWithViewStatus.push({ + generalWithView.push({ ...(userInputOptions[0] || {}), isSearchDocsOption: true, } as unknown as GeneralSearchHit) } } else if (urlSearchInputQuery.trim() !== '' && !searchLoading) { setAnnouncement(t('search.overlay.no_results_found_announcement')) - generalOptionsWithViewStatus.push({ + generalWithView.push({ title: t('search.overlay.no_results_found'), isNoResultsFound: true, } as any) } else { - generalOptionsWithViewStatus = [] + generalWithView = [] } // NOTE: Order of combinedOptions is important, since 'selectedIndex' is used to navigate the combinedOptions array // Add general options _before_ AI options - combinedOptions.push( - ...generalOptionsWithViewStatus.map((option) => ({ group: 'general', option })), - ) + combined.push(...generalWithView.map((option) => ({ group: 'general', option }))) // On AI Error, don't include AI suggestions, only user input if (!aiSearchError && !isAskAIState) { - combinedOptions.push(...aiOptionsWithUserInput.map((option) => ({ group: 
'ai', option }))) + combined.push(...aiWithUser.map((option) => ({ group: 'ai', option }))) } else if (isAskAIState && !aiCouldNotAnswer) { // When "ask ai" state is reached, we have references that are ActionList items. // We want to navigate these items via the keyboard, so include them in the combinedOptions array - combinedOptions.push( + combined.push( ...aiReferences.map((option) => ({ group: 'reference', // The references are actually article URLs that we want to navigate to url: option.url, @@ -240,7 +238,7 @@ export function SearchOverlay({ ) } - return [combinedOptions, generalOptionsWithViewStatus, aiOptionsWithUserInput] + return [combined, generalWithView, aiWithUser] }, [ generalSearchResults, totalGeneralSearchResults, diff --git a/src/search/components/results/SearchResults.tsx b/src/search/components/results/SearchResults.tsx index 27c3a0d5b2..399d011cc6 100644 --- a/src/search/components/results/SearchResults.tsx +++ b/src/search/components/results/SearchResults.tsx @@ -159,12 +159,12 @@ function ResultsPagination({ page, totalPages }: { page: number; totalPages: num } }, [asPath]) - function hrefBuilder(page: number) { + function hrefBuilder(pageNumber: number) { const params = new URLSearchParams(asPathQuery) - if (page === 1) { + if (pageNumber === 1) { params.delete('page') } else { - params.set('page', `${page}`) + params.set('page', `${pageNumber}`) } return `/${router.locale}${asPathRoot}?${params}` } @@ -176,22 +176,22 @@ function ResultsPagination({ page, totalPages }: { page: number; totalPages: num pageCount={Math.min(totalPages, 10)} currentPage={page} hrefBuilder={hrefBuilder} - onPageChange={(event, page) => { + onPageChange={(event, pageNum) => { event.preventDefault() - const [asPathRoot, asPathQuery = ''] = router.asPath.split('#')[0].split('?') - const params = new URLSearchParams(asPathQuery) - if (page !== 1) { - params.set('page', `${page}`) + const [pathRoot, pathQuery = ''] = router.asPath.split('#')[0].split('?') + const params = new URLSearchParams(pathQuery) + if (pageNum !== 1) { + params.set('page', `${pageNum}`) } else { params.delete('page') } - let asPath = `/${router.locale}${asPathRoot}` + let newPath = `/${router.locale}${pathRoot}` if (params.toString()) { - asPath += `?${params}` + newPath += `?${params}` } - setAsPath(asPath) - router.push(asPath) + setAsPath(newPath) + router.push(newPath) }} /> diff --git a/src/search/lib/elasticsearch-versions.ts b/src/search/lib/elasticsearch-versions.ts index d365ddda57..7e09dc2718 100644 --- a/src/search/lib/elasticsearch-versions.ts +++ b/src/search/lib/elasticsearch-versions.ts @@ -100,11 +100,11 @@ export function getPlanVersionFromIndexVersion(indexVersion: string): string { // This is needed for scraping since the pages use the 'allVersions' key as their version export function getAllVersionsKeyFromIndexVersion(indexVersion: string): string { const key = Object.keys(allVersions).find( - (key) => - key === indexVersion || - allVersions[key].shortName === indexVersion || - allVersions[key].plan === indexVersion || - allVersions[key].miscVersionName === indexVersion, + (versionKey) => + versionKey === indexVersion || + allVersions[versionKey].shortName === indexVersion || + allVersions[versionKey].plan === indexVersion || + allVersions[versionKey].miscVersionName === indexVersion, ) if (!key) { diff --git a/src/search/scripts/analyze-text.ts b/src/search/scripts/analyze-text.ts index 60833e28e3..3a13566ab6 100755 --- a/src/search/scripts/analyze-text.ts +++ b/src/search/scripts/analyze-text.ts 
@@ -79,8 +79,8 @@ try { process.exit(1) } -async function main(opts: Options, args: string[]): Promise { - const texts = [args.join(' ')] +async function main(opts: Options, textArgs: string[]): Promise { + const texts = [textArgs.join(' ')] if (!opts.elasticsearchUrl && !process.env.ELASTICSEARCH_URL) { throw new Error( 'Must pass the elasticsearch URL option or ' + diff --git a/src/search/scripts/index/lib/index-general-search.ts b/src/search/scripts/index/lib/index-general-search.ts index c588489899..7116794379 100644 --- a/src/search/scripts/index/lib/index-general-search.ts +++ b/src/search/scripts/index/lib/index-general-search.ts @@ -98,7 +98,7 @@ export async function indexGeneralSearch(sourceDirectory: string, opts: Options) versionsToIndex, ) - for (const language of languages) { + for (const lang of languages) { let count = 0 for (const versionKey of versionsToIndex) { const startTime = new Date() @@ -106,11 +106,11 @@ export async function indexGeneralSearch(sourceDirectory: string, opts: Options) const { indexName, indexAlias } = getElasticSearchIndex( 'generalSearch', versionKey, - language, + lang, opts.indexPrefix || '', ) - await indexVersion(client, indexName, indexAlias, language, sourceDirectory, opts) + await indexVersion(client, indexName, indexAlias, lang, sourceDirectory, opts) count++ if (opts.staggerSeconds && count < versionsToIndex.length - 1) { diff --git a/src/search/scripts/index/utils/indexing-elasticsearch-utils.ts b/src/search/scripts/index/utils/indexing-elasticsearch-utils.ts index 5b2b559d54..0db377ab67 100644 --- a/src/search/scripts/index/utils/indexing-elasticsearch-utils.ts +++ b/src/search/scripts/index/utils/indexing-elasticsearch-utils.ts @@ -64,11 +64,11 @@ export async function populateIndex( { attempts, sleepTime, - onError: (_, attempts, sleepTime) => { + onError: (_, remainingAttempts, sleepMs) => { console.warn( chalk.yellow( - `Failed to bulk index ${indexName}. Will attempt ${attempts} more times (after ${ - sleepTime / 1000 + `Failed to bulk index ${indexName}. Will attempt ${remainingAttempts} more times (after ${ + sleepMs / 1000 }s sleep).`, ), ) diff --git a/src/shielding/middleware/handle-invalid-query-string-values.ts b/src/shielding/middleware/handle-invalid-query-string-values.ts index b0a660dfc2..6efa459610 100644 --- a/src/shielding/middleware/handle-invalid-query-string-values.ts +++ b/src/shielding/middleware/handle-invalid-query-string-values.ts @@ -42,9 +42,9 @@ export default function handleInvalidQuerystringValues( for (const [key, value] of Object.entries(query)) { if (RECOGNIZED_VALUES_KEYS.has(key)) { const validValues = RECOGNIZED_VALUES[key as keyof typeof RECOGNIZED_VALUES] - const value = query[key] - const values = Array.isArray(value) ? value : [value] - if (values.some((value) => typeof value === 'string' && !validValues.includes(value))) { + const queryValue = query[key] + const values = Array.isArray(queryValue) ? queryValue : [queryValue] + if (values.some((val) => typeof val === 'string' && !validValues.includes(val))) { if (process.env.NODE_ENV === 'development') { console.warn( 'Warning! Invalid query string *value* detected. 
%O is not one of %O', diff --git a/src/shielding/tests/invalid-querystrings.ts b/src/shielding/tests/invalid-querystrings.ts index 992d805c38..3527f2ca66 100644 --- a/src/shielding/tests/invalid-querystrings.ts +++ b/src/shielding/tests/invalid-querystrings.ts @@ -53,9 +53,9 @@ describe('invalid query strings', () => { expect(res.headers.location).toBe('/en') // But note that it only applies to the home page! { - const url = `/en/get-started?${randomCharacters(8)}` - const res = await get(url) - expect(res.statusCode).toBe(200) + const nestedUrl = `/en/get-started?${randomCharacters(8)}` + const nestedRes = await get(nestedUrl) + expect(nestedRes.statusCode).toBe(200) } }) diff --git a/src/tools/components/PlatformPicker.tsx b/src/tools/components/PlatformPicker.tsx index c2d427fdd8..ceb02cbeb7 100644 --- a/src/tools/components/PlatformPicker.tsx +++ b/src/tools/components/PlatformPicker.tsx @@ -19,7 +19,7 @@ const platforms = [ function showPlatformSpecificContent(platform: string) { const markdowns = Array.from(document.querySelectorAll('.ghd-tool')) markdowns - .filter((el) => platforms.some((platform) => el.classList.contains(platform.value))) + .filter((el) => platforms.some((platformValue) => el.classList.contains(platformValue.value))) .forEach((el) => { el.style.display = el.classList.contains(platform) ? '' : 'none' @@ -36,7 +36,7 @@ function showPlatformSpecificContent(platform: string) { // example: inline content const platformEls = Array.from( document.querySelectorAll( - platforms.map((platform) => `.platform-${platform.value}`).join(', '), + platforms.map((platformOption) => `.platform-${platformOption.value}`).join(', '), ), ) platformEls.forEach((el) => { diff --git a/src/tools/components/ToolPicker.tsx b/src/tools/components/ToolPicker.tsx index df023209f9..32472ac8fa 100644 --- a/src/tools/components/ToolPicker.tsx +++ b/src/tools/components/ToolPicker.tsx @@ -14,7 +14,7 @@ import { InArticlePicker } from './InArticlePicker' function showToolSpecificContent(tool: string, supportedTools: Array) { const markdowns = Array.from(document.querySelectorAll('.ghd-tool')) markdowns - .filter((el) => supportedTools.some((tool) => el.classList.contains(tool))) + .filter((el) => supportedTools.some((toolName) => el.classList.contains(toolName))) .forEach((el) => { el.style.display = el.classList.contains(tool) ? '' : 'none' @@ -31,7 +31,7 @@ function showToolSpecificContent(tool: string, supportedTools: Array) { // example: inline content const toolEls = Array.from( document.querySelectorAll( - supportedTools.map((tool) => `.tool-${tool}`).join(', '), + supportedTools.map((toolOption) => `.tool-${toolOption}`).join(', '), ), ) toolEls.forEach((el) => { diff --git a/src/versions/middleware/features.ts b/src/versions/middleware/features.ts index 45aa77eb21..4b170e9329 100644 --- a/src/versions/middleware/features.ts +++ b/src/versions/middleware/features.ts @@ -37,7 +37,7 @@ function getFeaturesByVersion(currentVersion: string): Record { allFeatures = getDeepDataByLanguage('features', 'en') as Record } - const features: { + const featureFlags: { [feature: string]: boolean } = {} // Determine whether the currentVersion belongs to the list of versions the feature is available in. @@ -51,9 +51,9 @@ function getFeaturesByVersion(currentVersion: string): Record { // Adding the resulting boolean to the context object gives us the ability to use // `{% if featureName ... %}` conditionals in content files. 
const isFeatureAvailableInCurrentVersion = applicableVersions.includes(currentVersion) - features[featureName] = isFeatureAvailableInCurrentVersion + featureFlags[featureName] = isFeatureAvailableInCurrentVersion } - cache.set(currentVersion, features) + cache.set(currentVersion, featureFlags) } return cache.get(currentVersion) diff --git a/src/webhooks/tests/rendering.ts b/src/webhooks/tests/rendering.ts index cec11e66dd..f290eaeaf9 100644 --- a/src/webhooks/tests/rendering.ts +++ b/src/webhooks/tests/rendering.ts @@ -78,7 +78,7 @@ describe('webhooks events and payloads', () => { payloadExampleElem.each((i, elem) => { const siblings = $(elem) .nextUntil('[id^=webhook-payload-example]') - .filter((i, elem) => $(elem).hasClass('height-constrained-code-block')) + .filter((idx, sibling) => $(sibling).hasClass('height-constrained-code-block')) expect(siblings.length).toBeGreaterThan(0) }) } diff --git a/src/workflows/experimental/readability-report.ts b/src/workflows/experimental/readability-report.ts index d3ffa06f13..98ab9a67d2 100644 --- a/src/workflows/experimental/readability-report.ts +++ b/src/workflows/experimental/readability-report.ts @@ -162,8 +162,8 @@ function getChangedContentFiles(): string[] { }) } -function makeURL(path: string): string { - return `http://localhost:4000${path}` +function makeURL(urlPath: string): string { + return `http://localhost:4000${urlPath}` } async function waitForServer(): Promise { diff --git a/src/workflows/issue-report.ts b/src/workflows/issue-report.ts index e6fa75e09f..7d0c387300 100644 --- a/src/workflows/issue-report.ts +++ b/src/workflows/issue-report.ts @@ -101,13 +101,13 @@ export async function linkReports({ } // Comment on all previous reports that are still open - for (const previousReport of previousReports) { - if (previousReport.state === 'closed' || previousReport.html_url === newReport.html_url) { + for (const oldReport of previousReports) { + if (oldReport.state === 'closed' || oldReport.html_url === newReport.html_url) { continue } // If an old report is not assigned to someone we close it - const shouldClose = !previousReport.assignees?.length + const shouldClose = !oldReport.assignees?.length let body = `➡️ [Newer report](${newReport.html_url})` if (shouldClose) { body += '\n\nClosing in favor of newer report since there are no assignees on this issue' @@ -116,14 +116,12 @@ export async function linkReports({ await octokit.rest.issues.createComment({ owner, repo, - issue_number: previousReport.number, + issue_number: oldReport.number, body, }) - core.info( - `Linked old report to new report via comment on old report: #${previousReport.number}.`, - ) + core.info(`Linked old report to new report via comment on old report: #${oldReport.number}.`) } catch (error) { - core.setFailed(`Error commenting on previousReport, #${previousReport.number}`) + core.setFailed(`Error commenting on previousReport, #${oldReport.number}`) throw error } if (shouldClose) { @@ -131,12 +129,12 @@ export async function linkReports({ await octokit.rest.issues.update({ owner, repo, - issue_number: previousReport.number, + issue_number: oldReport.number, state: 'closed', }) - core.info(`Closing old report: #${previousReport.number} because it doesn't have assignees`) + core.info(`Closing old report: #${oldReport.number} because it doesn't have assignees`) } catch (error) { - core.setFailed(`Error closing previousReport, #${previousReport.number}`) + core.setFailed(`Error closing previousReport, #${oldReport.number}`) throw error } } diff --git 
a/src/workflows/projects.ts b/src/workflows/projects.ts index 10d9ba747a..fc953142d8 100644 --- a/src/workflows/projects.ts +++ b/src/workflows/projects.ts @@ -6,7 +6,7 @@ import { graphql } from '@octokit/graphql' // Pull out the node ID of a project field export function findFieldID(fieldName: string, data: Record) { const field = data.organization.projectV2.fields.nodes.find( - (field: Record) => field.name === fieldName, + (fieldNode: Record) => fieldNode.name === fieldName, ) if (field && field.id) { @@ -23,14 +23,14 @@ export function findSingleSelectID( data: Record, ) { const field = data.organization.projectV2.fields.nodes.find( - (field: Record) => field.name === fieldName, + (fieldData: Record) => fieldData.name === fieldName, ) if (!field) { throw new Error(`A field called "${fieldName}" was not found. Check if the field was renamed.`) } const singleSelect = field.options.find( - (field: Record) => field.name === singleSelectName, + (option: Record) => option.name === singleSelectName, ) if (singleSelect && singleSelect.id) { @@ -203,7 +203,7 @@ export function generateUpdateProjectV2ItemFieldMutation({ // Build the mutation to update a single project field // Specify literal=true to indicate that the value should be used as a string, not a variable function generateMutationToUpdateField({ - item, + item: itemId, fieldID, value, fieldType, @@ -220,12 +220,12 @@ export function generateUpdateProjectV2ItemFieldMutation({ // Strip all non-alphanumeric out of the item ID when creating the mutation ID to avoid a GraphQL parsing error // (statistically, this should still give us a unique mutation ID) return ` - set_${fieldID.slice(1)}_item_${item.replaceAll( + set_${fieldID.slice(1)}_item_${itemId.replaceAll( /[^a-z0-9]/g, '', )}: updateProjectV2ItemFieldValue(input: { projectId: $project - itemId: "${item}" + itemId: "${itemId}" fieldId: ${fieldID} value: { ${parsedValue} } }) { diff --git a/src/workflows/walk-files.ts b/src/workflows/walk-files.ts index 017855e7f9..9c4c6cc2cb 100644 --- a/src/workflows/walk-files.ts +++ b/src/workflows/walk-files.ts @@ -16,7 +16,10 @@ export default function walkFiles( const walkSyncOpts = { includeBasePath: true, directories: false } return walk(dir, walkSyncOpts) - .filter((file) => extensions.some((ext) => file.endsWith(ext)) && !file.endsWith('README.md')) + .filter( + (file) => + extensions.some((extension) => file.endsWith(extension)) && !file.endsWith('README.md'), + ) .filter((file) => (opts.includeEarlyAccess ? file : !file.includes('/early-access/'))) }
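For readers skimming the renames above, here is a minimal, self-contained TypeScript sketch of the shadowing pattern these changes remove. The function and variable names (findMarkdownFilesBefore, findMarkdownFilesAfter, filePath) are illustrative only and do not come from this changeset.

// Hypothetical illustration only -- not part of the diff above.

// Before: the callback parameter reuses the outer argument name `path`,
// hiding it inside the callback, so the directory argument cannot be used
// there without confusion (e.g. `path.startsWith(path)` is always true).
function findMarkdownFilesBefore(files: string[], path: string): string[] {
  return files.filter((path) => path.endsWith('.md'))
}

// After: the callback parameter gets a distinct name, so both values stay
// usable and linters that forbid shadowing no longer flag the code.
function findMarkdownFilesAfter(files: string[], path: string): string[] {
  return files.filter((filePath) => filePath.endsWith('.md') && filePath.startsWith(path))
}

// Example usage (hypothetical):
// findMarkdownFilesAfter(['docs/a.md', 'src/b.ts'], 'docs/')  // => ['docs/a.md']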