
Enable github/array-foreach ESLint rule by converting all forEach to for loops (#58302)

Co-authored-by: Robert Sese <734194+rsese@users.noreply.github.com>
Authored by Kevin Heis, committed by GitHub on 2025-11-12 11:18:21 -08:00
parent 86216fb38b
commit 8adc699635
124 changed files with 777 additions and 718 deletions
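The conversion applied throughout this commit is mechanical: a callback-style `forEach` becomes a `for...of` loop, which the `github/array-foreach` rule prefers because plain loops support `break`, `continue`, and `await`. A minimal sketch of the before/after shape (illustrative names, not taken from any file below):

```ts
const labels: string[] = ['a', 'b', 'c']

// Before: callback iteration, flagged by github/array-foreach
labels.forEach((label) => {
  console.log(label)
})

// After: a plain loop that can also break, continue, or await
for (const label of labels) {
  console.log(label)
}
```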

View File

@@ -96,7 +96,6 @@ export default [
 camelcase: 'off', // Many gh apis use underscores, 600+ uses
 // Disabled rules to review
-'github/array-foreach': 'off', // 250+
 'no-console': 'off', // 800+
 '@typescript-eslint/no-explicit-any': 'off', // 1000+
 },

View File

@@ -48,9 +48,9 @@ const editorTypes: EditorTypes = {
 const refinementDescriptions = (): string => {
   let str = '\n\n'
-  Object.entries(editorTypes).forEach(([ed, edObj]) => {
+  for (const [ed, edObj] of Object.entries(editorTypes)) {
     str += `  ${ed.padEnd(12)} ${edObj.description}\n`
-  })
+  }
   return str
 }
@@ -155,10 +155,10 @@ async function callEditor(
   const prompt = yaml.load(fs.readFileSync(promptTemplatePath, 'utf8')) as PromptData
-  prompt.messages.forEach((msg) => {
+  for (const msg of prompt.messages) {
     msg.content = msg.content.replace('{{markdownPrompt}}', markdownPrompt)
     msg.content = msg.content.replace('{{input}}', content)
-  })
+  }
   return callModelsApi(prompt)
 }
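Where the old callback destructured an `Object.entries` tuple, the new loop destructures the same tuple in the loop head. A small sketch with made-up data:

```ts
const editorTypes: Record<string, { description: string }> = {
  vscode: { description: 'Visual Studio Code' },
  vim: { description: 'Vim' },
}

let str = ''
// The [key, value] pair destructures exactly as it did in the forEach callback
for (const [name, meta] of Object.entries(editorTypes)) {
  str += `  ${name.padEnd(12)} ${meta.description}\n`
}
```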

View File

@@ -117,7 +117,7 @@ function extractExample(commentBlock: string): string {
 function generateMarkdown(apiDocs: any[]): string {
   let markdown = '## Reference: API endpoints\n\n'
-  apiDocs.forEach((doc) => {
+  for (const doc of apiDocs) {
     markdown += `### ${doc.method.toUpperCase()} ${doc.path}\n\n`
     markdown += `${doc.description}\n\n`
@@ -142,7 +142,7 @@ function generateMarkdown(apiDocs: any[]): string {
     }
     markdown += '---\n\n'
-  })
+  }
   return markdown
 }

View File

@@ -41,22 +41,16 @@ describe.each(allVersionKeys)('pagelist api for %s', async (versionKey) => {
       expression = new RegExp(`/\\w{2}(/${versionKey})?/?.*`)
     else expression = new RegExp(`/\\w{2}/${versionKey}/?.*`)
-    res.body
-      .trim()
-      .split('\n')
-      .forEach((permalink: string) => {
-        expect(permalink).toMatch(expression)
-      })
+    for (const permalink of res.body.trim().split('\n')) {
+      expect(permalink).toMatch(expression)
+    }
   })
   test('English requests only returns urls that contain /en', async () => {
     const expression = new RegExp(`^/en(/${nonEnterpriseDefaultVersion})?/?.*`)
-    res.body
-      .trim()
-      .split('\n')
-      .forEach((permalink: string) => {
-        expect(permalink).toMatch(expression)
-      })
+    for (const permalink of res.body.trim().split('\n')) {
+      expect(permalink).toMatch(expression)
+    }
   })
 })

View File

@@ -25,9 +25,7 @@ const images = await Promise.all(
     return { relativePath, width, height, size }
   }),
 )
-images
-  .sort((a, b) => b.size - a.size)
-  .forEach((image) => {
-    const { relativePath, width, height } = image
-    console.log(`${width} x ${height} - ${relativePath}`)
-  })
+for (const image of images.sort((a, b) => b.size - a.size)) {
+  const { relativePath, width, height } = image
+  console.log(`${width} x ${height} - ${relativePath}`)
+}

View File

@@ -317,14 +317,14 @@ export async function filterAndUpdateGhesDataByAllowlistValues({
 // Categorizes the given array of audit log events by event category
 function categorizeEvents(events: AuditLogEventT[]) {
   const categorizedEvents: CategorizedEvents = {}
-  events.forEach((event) => {
+  for (const event of events) {
     const [category] = event.action.split('.')
     if (!Object.hasOwn(categorizedEvents, category)) {
       categorizedEvents[category] = []
     }
     categorizedEvents[category].push(event)
-  })
+  }
   return categorizedEvents
 }

View File

@@ -180,7 +180,7 @@ async function main() {
     await mkdirp(auditLogVersionDirPath)
   }
-  Object.values(AUDIT_LOG_PAGES).forEach(async (page) => {
+  for (const page of Object.values(AUDIT_LOG_PAGES)) {
     const auditLogSchemaFilePath = path.join(auditLogVersionDirPath, `${page}.json`)
     if (auditLogData[version][page]) {
@@ -188,9 +188,8 @@ async function main() {
         auditLogSchemaFilePath,
         JSON.stringify(auditLogData[version][page], null, 2),
       )
-      console.log(`✅ Wrote ${auditLogSchemaFilePath}`)
     }
-  })
+  }
 }
 }
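The hunk above fixes more than style: the old code passed an `async` callback to `forEach`, which fires every callback without awaiting the returned promises, so `main()` could finish before the schema files were written. A `for...of` loop inside an `async` function awaits each step. A sketch with hypothetical names:

```ts
// writePage is a hypothetical stand-in for the file write in the real script
async function writeAllPages(pages: string[], writePage: (p: string) => Promise<void>) {
  // Buggy: forEach discards the promises, so nothing is awaited
  // pages.forEach(async (page) => await writePage(page))

  // Correct: each write completes before the next begins
  for (const page of pages) {
    await writePage(page)
  }
}
```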

View File

@@ -31,10 +31,10 @@ describe('Audit log fields functionality', () => {
     if (eventWithFields) {
       expect(Array.isArray(eventWithFields.fields)).toBe(true)
-      eventWithFields.fields!.forEach((field) => {
+      for (const field of eventWithFields.fields!) {
         expect(typeof field).toBe('string')
         expect(field.length).toBeGreaterThan(0)
-      })
+      }
     }
   })
@@ -42,14 +42,14 @@ describe('Audit log fields functionality', () => {
     // Some events might not have fields, this should not break anything
     const events = getAuditLogEvents('organization', 'enterprise-cloud@latest')
-    events.forEach((event) => {
+    for (const event of events) {
       expect(event).toHaveProperty('action')
       expect(event).toHaveProperty('description')
       // fields property is optional
       if (event.fields) {
         expect(Array.isArray(event.fields)).toBe(true)
       }
-    })
+    }
   })
   test('should include common audit log fields', () => {
@@ -82,19 +82,19 @@ describe('Audit log fields functionality', () => {
     expect(categories.length).toBeGreaterThan(0)
     // Check that events in categories have proper structure including fields
-    categories.forEach((category) => {
+    for (const category of categories) {
       const events = categorizedEvents[category]
       expect(Array.isArray(events)).toBe(true)
-      events.forEach((event: AuditLogEventT) => {
+      for (const event of events as AuditLogEventT[]) {
         expect(event).toHaveProperty('action')
         expect(event).toHaveProperty('description')
         // fields is optional but if present should be array
         if (event.fields) {
           expect(Array.isArray(event.fields)).toBe(true)
         }
-      })
-    })
+      }
+    }
   })
   test('should preserve fields data through categorization', () => {
@@ -127,12 +127,12 @@ describe('Audit log fields functionality', () => {
   test('should not have duplicate fields in same event', () => {
     const events = getAuditLogEvents('organization', 'enterprise-cloud@latest')
-    events.forEach((event) => {
+    for (const event of events) {
       if (event.fields) {
         const uniqueFields = new Set(event.fields)
         expect(uniqueFields.size).toBe(event.fields.length)
       }
-    })
+    }
   })
   test('should have reasonable field names', () => {
@@ -140,7 +140,7 @@ describe('Audit log fields functionality', () => {
     const eventWithFields = events.find((event) => event.fields && event.fields.length > 0)
     if (eventWithFields) {
-      eventWithFields.fields!.forEach((field) => {
+      for (const field of eventWithFields.fields!) {
         // Field names should be reasonable strings
         expect(field).toBeTruthy()
         expect(typeof field).toBe('string')
@@ -149,33 +149,33 @@ describe('Audit log fields functionality', () => {
         // Should not contain special characters that would break display
         expect(field).not.toMatch(/[<>'"&]/)
-      })
+      }
     }
   })
   test('should handle different page types consistently', () => {
     const pageTypes = ['organization', 'enterprise', 'user']
-    pageTypes.forEach((pageType) => {
+    for (const pageType of pageTypes) {
       try {
         const events = getAuditLogEvents(pageType, 'enterprise-cloud@latest')
-        events.forEach((event) => {
+        for (const event of events) {
           expect(event).toHaveProperty('action')
           expect(event).toHaveProperty('description')
           if (event.fields) {
             expect(Array.isArray(event.fields)).toBe(true)
-            event.fields.forEach((field) => {
+            for (const field of event.fields) {
               expect(typeof field).toBe('string')
-            })
+            }
           }
-        })
+        }
       } catch (error) {
         // Some page types might not exist for certain versions, that's ok
         console.log(`Skipping ${pageType} page type due to: ${error}`)
       }
-    })
+    }
   })
 })
@@ -194,9 +194,9 @@ describe('Audit log fields functionality', () => {
     if (fields) {
       expect(Array.isArray(fields)).toBe(true)
-      fields.forEach((field) => {
+      for (const field of fields) {
         expect(typeof field).toBe('string')
-      })
+      }
     }
   })
 })

View File

@@ -16,10 +16,10 @@ describe('audit log category notes', () => {
   test('category notes are strings', () => {
     if (config.categoryNotes) {
-      Object.values(config.categoryNotes).forEach((note) => {
+      for (const note of Object.values(config.categoryNotes)) {
         expect(typeof note).toBe('string')
         expect(note.length).toBeGreaterThan(0)
-      })
+      }
     }
   })
@@ -51,13 +51,13 @@ describe('audit log category notes', () => {
     expect(Object.keys(enterpriseEvents).length).toBeGreaterThan(0)
     // Each category should still contain arrays of events
-    Object.values(organizationEvents).forEach((events) => {
+    for (const events of Object.values(organizationEvents)) {
       expect(Array.isArray(events)).toBe(true)
       if (events.length > 0) {
         expect(events[0]).toHaveProperty('action')
         expect(events[0]).toHaveProperty('description')
       }
-    })
+    }
   })
   test('category notes are properly typed', () => {

View File

@@ -399,7 +399,9 @@ async function getIndexFileVersions(
       throw new Error(`Frontmatter in ${filepath} does not contain versions.`)
     }
     const fmVersions = getApplicableVersions(data.versions)
-    fmVersions.forEach((version: string) => versions.add(version))
+    for (const version of fmVersions) {
+      versions.add(version)
+    }
   }),
 )
 const versionArray = [...versions]
const versionArray = [...versions] const versionArray = [...versions]
@@ -431,7 +433,7 @@ export async function convertVersionsToFrontmatter(
   // Currently, only GHES is numbered. Number releases have to be
   // handled differently because they use semantic versioning.
-  versions.forEach((version) => {
+  for (const version of versions) {
     const docsVersion = allVersions[version]
     if (!docsVersion.hasNumberedReleases) {
       frontmatterVersions[docsVersion.shortName] = '*'
@@ -455,10 +457,10 @@ export async function convertVersionsToFrontmatter(
         numberedReleases[docsVersion.shortName].availableReleases[i] = docsVersion.currentRelease
       }
     }
-  })
+  }
   // Create semantic versions for numbered releases
-  Object.keys(numberedReleases).forEach((key) => {
+  for (const key of Object.keys(numberedReleases)) {
     const availableReleases = numberedReleases[key].availableReleases
     const versionContinuity = checkVersionContinuity(availableReleases)
     if (availableReleases.every(Boolean)) {
@@ -483,7 +485,7 @@ export async function convertVersionsToFrontmatter(
       }
       frontmatterVersions[key] = semVer.join(' ')
     }
-  })
+  }
   const sortedFrontmatterVersions = Object.keys(frontmatterVersions)
     .sort()
     .reduce((acc: { [key: string]: string }, key) => {

View File

@@ -88,9 +88,9 @@ describe('automated content directory updates', () => {
     // Because of that, we need to update the content paths to use the
     // full file path.
     const contentDataFullPath: { [key: string]: ContentItem } = {}
-    Object.keys(newContentData).forEach(
-      (key: string) => (contentDataFullPath[path.join(targetDirectory, key)] = newContentData[key]),
-    )
+    for (const key of Object.keys(newContentData)) {
+      contentDataFullPath[path.join(targetDirectory, key)] = newContentData[key]
+    }
     // Rewrites the content directory in the operating system's
     // temp directory.

View File

@@ -130,11 +130,11 @@ export async function convertContentToDocs(
     // There are some keywords like [Plumbing] used by the code comments
     // but we don't want to render them in the docs.
     if (node.type === 'text' && node.value) {
-      removeKeywords.forEach((keyword) => {
+      for (const keyword of removeKeywords) {
         if (node.value.includes(keyword)) {
           node.value = node.value.replace(keyword, '').trim()
         }
-      })
+      }
     }
     // The subsections under the main headings (level 2) are commands

View File

@@ -82,13 +82,13 @@ export async function getLintableYml(dataFilePath: string): Promise<Record<strin
 // 'data/variables/product.yml /pat_v1_caps'
 function addPathToKey(mdDictMap: Map<string, string>, dataFilePath: string): Map<string, string> {
   const keys = Array.from(mdDictMap.keys())
-  keys.forEach((key) => {
+  for (const key of keys) {
     const newKey = `${dataFilePath} ${key}`
     const value = mdDictMap.get(key)
     if (value !== undefined) {
       mdDictMap.delete(key)
       mdDictMap.set(newKey, value)
     }
-  })
+  }
   return mdDictMap
 }

View File

@@ -24,7 +24,7 @@ interface ProcessedValidationError {
 export function formatAjvErrors(errors: AjvValidationError[] = []): ProcessedValidationError[] {
   const processedErrors: ProcessedValidationError[] = []
-  errors.forEach((errorObj: AjvValidationError) => {
+  for (const errorObj of errors) {
     const error: Partial<ProcessedValidationError> = {}
     error.instancePath =
@@ -58,7 +58,7 @@ export function formatAjvErrors(errors: AjvValidationError[] = []): ProcessedVal
     }
     processedErrors.push(error as ProcessedValidationError)
-  })
+  }
   return processedErrors
 }

View File

@@ -114,11 +114,12 @@ export function filterTokensByOrder(
   // first token (root) in the tokenOrder array
   const tokenRootIndexes: number[] = []
   const firstTokenOrderType = tokenOrder[0]
-  tokens.forEach((token, index) => {
+  for (let index = 0; index < tokens.length; index++) {
+    const token = tokens[index]
     if (token.type === firstTokenOrderType) {
       tokenRootIndexes.push(index)
     }
-  })
+  }
   // Loop through each root token index and check if
   // the order matches the tokenOrder array
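When the callback needed the element index, the commit switches to a counted `for` loop. An equivalent, slightly terser option is iterating `Array.prototype.entries()`; a sketch with made-up tokens:

```ts
const tokens = [{ type: 'inline' }, { type: 'fence' }, { type: 'inline' }]
const tokenRootIndexes: number[] = []

// entries() yields [index, element] pairs, mirroring forEach((token, index) => ...)
for (const [index, token] of tokens.entries()) {
  if (token.type === 'inline') tokenRootIndexes.push(index)
}
```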

View File

@@ -17,9 +17,10 @@ export const codeAnnotationCommentSpacing = {
     const lines = content.split('\n')
-    lines.forEach((line: string, index: number) => {
+    for (let index = 0; index < lines.length; index++) {
+      const line: string = lines[index]
       const trimmedLine = line.trim()
-      if (!trimmedLine) return
+      if (!trimmedLine) continue
       // Define a map of comment patterns
       const commentPatterns: Record<string, RegExp> = {
@@ -46,7 +47,7 @@ export const codeAnnotationCommentSpacing = {
       if (commentMatch && restOfLine !== null && commentChar !== null) {
         // Skip shebang lines (#!/...)
         if (trimmedLine.startsWith('#!')) {
-          return
+          continue
         }
         // Allow empty comments or comments with exactly one space
@@ -75,7 +76,7 @@ export const codeAnnotationCommentSpacing = {
           )
         }
         // Single space or empty - this is correct
-        return
+        continue
       } else {
         // No space after comment character - this is an error
         const lineNumber: number = token.lineNumber + index + 1
@@ -97,7 +98,7 @@ export const codeAnnotationCommentSpacing = {
         )
       }
     }
-    })
+    }
   })
 },
}
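Note the second change riding along with the loop conversion above: inside a `forEach` callback, `return` merely skips to the next element, so the equivalent statement in a real loop is `continue`. A minimal sketch (hypothetical `process` helper):

```ts
const lines = ['# heading', '', '# another']
const process = (line: string) => console.log(line)

// Callback style: `return` ends only this iteration
lines.forEach((line) => {
  if (!line.trim()) return
  process(line)
})

// Loop style: the same intent is spelled `continue`
for (const line of lines) {
  if (!line.trim()) continue
  process(line)
}
```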

View File

@@ -19,15 +19,15 @@ export const expiredContent: Rule = {
       (token: MarkdownToken) => token.type === 'inline' || token.type === 'html_block',
     )
-    tokensToCheck.forEach((token: MarkdownToken) => {
+    for (const token of tokensToCheck) {
       // Looking for just opening tag with format:
       // <!-- expires yyyy-mm-dd -->
       const match = token.content?.match(/<!--\s*expires\s(\d\d\d\d)-(\d\d)-(\d\d)\s*-->/)
-      if (!match || !token.content) return
+      if (!match || !token.content) continue
       const expireDate = new Date(match.splice(1, 3).join(' '))
       const today = new Date()
-      if (today < expireDate) return
+      if (today < expireDate) continue
       // We want the content split by line since not all token.content is in one line
       // to get the correct range of the expired content. Below is how markdownlint
@@ -44,7 +44,7 @@ export const expiredContent: Rule = {
         [startRange, match[0].length],
         null, // No fix possible
       )
-    })
+    }
   },
 }
@@ -68,11 +68,11 @@ export const expiringSoon: Rule = {
       (token: MarkdownToken) => token.type === 'inline' || token.type === 'html_block',
     )
-    tokensToCheck.forEach((token: MarkdownToken) => {
+    for (const token of tokensToCheck) {
       // Looking for just opening tag with format:
       // <!-- expires yyyy-mm-dd -->
       const match = token.content?.match(/<!--\s*expires\s(\d\d\d\d)-(\d\d)-(\d\d)\s*-->/)
-      if (!match || !token.content) return
+      if (!match || !token.content) continue
       const expireDate = new Date(match.splice(1, 3).join(' '))
       const today = new Date()
@@ -80,7 +80,7 @@ export const expiringSoon: Rule = {
       futureDate.setDate(today.getDate() + DAYS_TO_WARN_BEFORE_EXPIRED)
       // Don't set warning if the content is already expired or
       // if the content expires later than the DAYS_TO_WARN_BEFORE_EXPIRED
-      if (today > expireDate || expireDate > futureDate) return
+      if (today > expireDate || expireDate > futureDate) continue
       addError(
         onError,
@@ -90,6 +90,6 @@ export const expiringSoon: Rule = {
         [token.content.indexOf(match[0]) + 1, match[0].length],
         null, // No fix possible
       )
-    })
+    }
   },
 }

View File

@@ -81,7 +81,7 @@ export const frontmatterLandingRecommended = {
     const duplicates: string[] = []
     const invalidPaths: string[] = []
-    fm.recommended.forEach((item: string) => {
+    for (const item of fm.recommended) {
       if (seen.has(item)) {
         duplicates.push(item)
       } else {
@@ -92,7 +92,7 @@ export const frontmatterLandingRecommended = {
       if (!isValidArticlePath(item, params.name)) {
         invalidPaths.push(item)
       }
-    })
+    }
     if (duplicates.length > 0) {
       addError(

View File

@@ -23,8 +23,8 @@ export const frontmatterVersionsWhitespace: Rule = {
     if (fmStartIndex === -1) return
     // Check each version entry for whitespace issues
-    Object.entries(versionsObj).forEach(([key, value]) => {
-      if (typeof value !== 'string') return
+    for (const [key, value] of Object.entries(versionsObj)) {
+      if (typeof value !== 'string') continue
       const hasUnwantedWhitespace = checkForUnwantedWhitespace(value)
       if (hasUnwantedWhitespace) {
@@ -55,7 +55,7 @@ export const frontmatterVersionsWhitespace: Rule = {
         )
       }
     }
-    })
+    }
   },
 }

View File

@@ -63,9 +63,11 @@ export const journeyTracksGuidePathExists = {
     const journeyTracksLineNumber = params.lines.indexOf(journeyTracksLine) + 1
-    fm.journeyTracks.forEach((track: any, trackIndex: number) => {
+    for (let trackIndex = 0; trackIndex < fm.journeyTracks.length; trackIndex++) {
+      const track: any = fm.journeyTracks[trackIndex]
       if (track.guides && Array.isArray(track.guides)) {
-        track.guides.forEach((guide: string, guideIndex: number) => {
+        for (let guideIndex = 0; guideIndex < track.guides.length; guideIndex++) {
+          const guide: string = track.guides[guideIndex]
           if (typeof guide === 'string') {
             if (!isValidGuidePath(guide, params.name)) {
               addError(
@@ -76,8 +78,8 @@ export const journeyTracksGuidePathExists = {
               )
             }
           }
-        })
+        }
       }
-    })
+    }
   },
 }

View File

@@ -22,7 +22,8 @@ export const journeyTracksLiquid = {
       ? params.lines.indexOf(journeyTracksLine) + 1
       : 1
-    fm.journeyTracks.forEach((track: any, trackIndex: number) => {
+    for (let trackIndex = 0; trackIndex < fm.journeyTracks.length; trackIndex++) {
+      const track: any = fm.journeyTracks[trackIndex]
       // Try to find the line number for this specific journey track so we can use that for the error
       // line number. Getting the exact line number is probably more work than it's worth for this
       // particular rule.
@@ -57,7 +58,7 @@ export const journeyTracksLiquid = {
         { name: 'description', value: track.description },
       ]
-      properties.forEach((prop) => {
+      for (const prop of properties) {
         if (prop.value && typeof prop.value === 'string') {
           try {
             liquid.parse(prop.value)
@@ -70,10 +71,11 @@ export const journeyTracksLiquid = {
             )
           }
         }
-      })
+      }
       if (track.guides && Array.isArray(track.guides)) {
-        track.guides.forEach((guide: string, guideIndex: number) => {
+        for (let guideIndex = 0; guideIndex < track.guides.length; guideIndex++) {
+          const guide: string = track.guides[guideIndex]
           if (typeof guide === 'string') {
             try {
               liquid.parse(guide)
@@ -86,8 +88,8 @@ export const journeyTracksLiquid = {
              )
            }
          }
-        })
+        }
       }
-    })
+    }
   },
 }

View File

@@ -48,11 +48,12 @@ export const journeyTracksUniqueIds = {
     // Track seen journey track IDs and line number for error reporting
     const seenIds = new Map<string, number>()
-    fm.journeyTracks.forEach((track: any, index: number) => {
-      if (!track || typeof track !== 'object') return
+    for (let index = 0; index < fm.journeyTracks.length; index++) {
+      const track: any = fm.journeyTracks[index]
+      if (!track || typeof track !== 'object') continue
       const trackId = track.id
-      if (!trackId || typeof trackId !== 'string') return
+      if (!trackId || typeof trackId !== 'string') continue
       const currentLineNumber = getTrackLineNumber(index)
@@ -66,6 +67,6 @@ export const journeyTracksUniqueIds = {
       } else {
         seenIds.set(trackId, currentLineNumber)
       }
-    })
+    }
   },
 }

View File

@@ -128,7 +128,7 @@ function validateIfversionConditionals(cond: string, possibleVersionNames: Set<s
   // Note that Lengths 1 and 2 may be used with feature-based versioning, but NOT Length 3.
   const condParts = cond.split(/ (or|and) /).filter((part) => !(part === 'or' || part === 'and'))
-  condParts.forEach((str) => {
+  for (const str of condParts) {
     const strParts = str.split(' ')
     // if length = 1, this should be a valid short version or feature version name.
     if (strParts.length === 1) {
@@ -192,7 +192,7 @@ function validateIfversionConditionals(cond: string, possibleVersionNames: Set<s
       )
     }
   }
-  })
+  }
   return errors
 }

View File

@@ -54,7 +54,7 @@ export const thirdPartyActionPinning: Rule = {
       const steps = getWorkflowSteps(yamlObj)
       if (!steps.some((step) => step.uses)) return
-      steps.forEach((step) => {
+      for (const step of steps) {
         if (step.uses) {
           const actionMatch = step.uses.match(actionRegex)
           if (actionMatch) {
@@ -71,7 +71,7 @@ export const thirdPartyActionPinning: Rule = {
             }
           }
         }
-      })
+      }
     } catch (e) {
       if (e instanceof yaml.YAMLException) {
         console.log('YAML Exception file:', params.name)

View File

@@ -43,7 +43,7 @@ export const yamlScheduledJobs: Rule = {
       if (!yamlObj.on) return
       if (!yamlObj.on.schedule) return
-      yamlObj.on.schedule.forEach((schedule: YamlSchedule) => {
+      for (const schedule of yamlObj.on.schedule) {
         if (schedule.cron.split(' ')[0] === '0') {
           addError(
             onError,
@@ -57,13 +57,13 @@ export const yamlScheduledJobs: Rule = {
           addError(
             onError,
             getLineNumber(token.content!, schedule.cron) + token.lineNumber,
-            `YAML scheduled workflow must be unique`,
+            `YAML scheduled workflow must not use the same cron schedule as another workflow`,
             schedule.cron,
           )
-        } else {
-          scheduledYamlJobs.push(schedule.cron)
         }
-      })
+        scheduledYamlJobs.push(schedule.cron)
+      }
     })
   },
 }
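Beyond the loop conversion, this hunk rewords the duplicate-cron error and now pushes every cron expression onto `scheduledYamlJobs` instead of only non-duplicates; the duplicate check still behaves the same, since the first occurrence is already in the list by the time a repeat is seen.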

View File

@@ -58,20 +58,22 @@ childProcess.on('close', (code: number | null) => {
   )
   console.log(`${Object.values(markdownViolations).flat().length} violations found.`)
-  Object.entries(markdownViolations).forEach(
-    ([fileName, results]: [string, Array<{ lineNumber: number }>]) => {
-      console.log(fileName)
-      console.log(results)
-      const fileLines = fs.readFileSync(fileName, 'utf8').split('\n')
-      results.forEach((result) => {
-        matchingRulesFound++
-        const lineIndex = result.lineNumber - 1
-        const offendingLine = fileLines[lineIndex]
-        fileLines[lineIndex] = offendingLine.concat(` <!-- markdownlint-disable-line ${rule} -->`)
-      })
-      fs.writeFileSync(fileName, fileLines.join('\n'), 'utf8')
-    },
-  )
+  const violationEntries = Object.entries(markdownViolations) as [
+    string,
+    Array<{ lineNumber: number }>,
+  ][]
+  for (const [fileName, results] of violationEntries) {
+    console.log(fileName)
+    console.log(results)
+    const fileLines = fs.readFileSync(fileName, 'utf8').split('\n')
+    for (const result of results) {
+      matchingRulesFound++
+      const lineIndex = result.lineNumber - 1
+      const offendingLine = fileLines[lineIndex]
+      fileLines[lineIndex] = offendingLine.concat(` <!-- markdownlint-disable-line ${rule} -->`)
+    }
+    fs.writeFileSync(fileName, fileLines.join('\n'), 'utf8')
+  }
   console.log(`${matchingRulesFound} violations ignored.`)
 })

View File

@@ -197,7 +197,7 @@ async function main() {
       customRules: configuredRules.yml,
     })) as LintResults
-    Object.entries(resultYmlFile).forEach(([key, value]) => {
+    for (const [key, value] of Object.entries(resultYmlFile)) {
       if ((value as LintError[]).length) {
         const errors = (value as LintError[]).map((error) => {
           // Autofixing would require us to write the changes back to the YML
@@ -209,7 +209,7 @@ async function main() {
         })
         resultYml[key] = errors
       }
-    })
+    }
   }
// There are no collisions when assigning the results to the new object // There are no collisions when assigning the results to the new object
@@ -219,10 +219,10 @@ async function main() {
   // Merge in the results for frontmatter tests, which could be
   // in a file that already exists as a key in the `results` object.
-  Object.entries(resultFrontmatter).forEach(([key, value]) => {
+  for (const [key, value] of Object.entries(resultFrontmatter)) {
     if (results[key]) results[key].push(...(value as LintError[]))
     else results[key] = value as LintError[]
-  })
+  }
   // Apply markdownlint fixes if available and rewrite the files
   let countFixedFiles = 0
@@ -476,7 +476,7 @@ function reportSummaryByRule(results: LintResults, config: LintConfig): void {
   // the default property is not actually a rule
   delete ruleCount.default
-  Object.keys(results).forEach((key) => {
+  for (const key of Object.keys(results)) {
     if (results[key].length > 0) {
       for (const flaw of results[key]) {
         const ruleName = flaw.ruleNames[1]
@@ -485,7 +485,7 @@ function reportSummaryByRule(results: LintResults, config: LintConfig): void {
         ruleCount[ruleName] = count + 1
       }
     }
-  })
+  }
 }
/* /*
@@ -498,26 +498,26 @@ function getFormattedResults(
   isInPrecommitMode: boolean,
 ): FormattedResults {
   const output: FormattedResults = {}
-  Object.entries(allResults)
-    // Each result key always has an array value, but it may be empty
-    .filter(([, results]) => results.length)
-    .forEach(([key, fileResults]) => {
+  const filteredResults = Object.entries(allResults)
+    // Each result key always has an array value, but it may be empty
+    .filter(([, results]) => results.length)
+  for (const [key, fileResults] of filteredResults) {
     if (verbose) {
       output[key] = fileResults.map((flaw: LintError) => formatResult(flaw, isInPrecommitMode))
     } else {
       const formattedResults = fileResults.map((flaw: LintError) =>
         formatResult(flaw, isInPrecommitMode),
       )
       // Only add the file to output if there are results after filtering
       if (formattedResults.length > 0) {
         const errors = formattedResults.filter((result) => result.severity === 'error')
         const warnings = formattedResults.filter((result) => result.severity === 'warning')
         const sortedResult = [...errors, ...warnings]
         output[key] = [...sortedResult]
       }
     }
-  })
+  }
   return output
 }
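Chained `.filter(...).forEach(...)` pipelines are split so the filter result lands in a named variable and only the terminal iteration becomes a loop. A sketch of that shape with stand-in data:

```ts
const allResults: Record<string, string[]> = { 'a.md': ['MD001'], 'b.md': [] }

// The filter stage stays functional; only the final forEach becomes a loop
const nonEmpty = Object.entries(allResults).filter(([, flaws]) => flaws.length)
for (const [file, flaws] of nonEmpty) {
  console.log(`${file}: ${flaws.length} flaw(s)`)
}
```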

View File

@@ -46,9 +46,9 @@ describe.skip('category pages', () => {
   // Combine those to fit vitest's `.each` usage
   const productTuples = zip(productNames, productIndices) as [string, string][]
-  // Use a regular forEach loop to generate the `describe(...)` blocks
+  // Use a regular for...of loop to generate the `describe(...)` blocks
   // otherwise, if one of them has no categories, the tests will fail.
-  productTuples.forEach((tuple) => {
+  for (const tuple of productTuples) {
     const [, productIndex] = tuple
     // Get links included in product index page.
     // Each link corresponds to a product subdirectory (category).
@@ -196,11 +196,11 @@ describe.skip('category pages', () => {
     })
     test('contains only articles and subcategories with versions that are also available in the parent category', () => {
-      Object.entries(articleVersions).forEach(([articleName, versions]) => {
+      for (const [articleName, versions] of Object.entries(articleVersions)) {
         const unexpectedVersions = difference(versions, categoryVersions)
         const errorMessage = `${articleName} has versions that are not available in parent category`
         expect(unexpectedVersions.length, errorMessage).toBe(0)
-      })
+      }
     })
     test('slugified title matches parent directory name', () => {
test('slugified title matches parent directory name', () => { test('slugified title matches parent directory name', () => {
@@ -229,7 +229,7 @@ describe.skip('category pages', () => {
       })
     },
   )
-  })
+  }
 })
 function getPath(productDir: string, link: string, filename: string) {

View File

@@ -30,14 +30,14 @@ describe('lint learning tracks', () => {
       // Using any[] for toLint since it contains mixed string content from various YAML properties
       const toLint: any[] = []
       // Using any for destructured params as YAML structure varies across different learning track files
-      Object.values(yamlContent).forEach(({ title, description }: any) => {
+      for (const { title, description } of Object.values(yamlContent) as any[]) {
         toLint.push(title)
         toLint.push(description)
-      })
+      }
-      toLint.forEach((element) => {
+      for (const element of toLint) {
         expect(() => liquid.parse(element), `${element} contains invalid liquid`).not.toThrow()
-      })
+      }
     })
   })
 })

View File

@@ -43,14 +43,14 @@ describe('data references', () => {
     variables.map(async (variablesPerFile) => {
       const variableRefs = getDataReferences(JSON.stringify(variablesPerFile))
-      variableRefs.forEach((key: string) => {
+      for (const key of variableRefs) {
         const value = getDataByLanguage(key, 'en')
         if (typeof value !== 'string') {
           const filename = getFilenameByValue(allVariables, variablesPerFile)
           const variableFile = path.join('data/variables', filename || '')
           errors.push({ key, value, variableFile })
         }
-      })
+      }
     }),
   )

View File

@@ -153,7 +153,7 @@ This is a test.
 describe(frontmatterVersionsWhitespace.names.join(' - '), () => {
   describe('valid cases', () => {
-    validCases.forEach(({ name, content }) => {
+    for (const { name, content } of validCases) {
       test(`${name} should pass`, async () => {
         const result = await runRule(frontmatterVersionsWhitespace, {
           strings: { content },
@@ -161,11 +161,11 @@ describe(frontmatterVersionsWhitespace.names.join(' - '), () => {
         })
         expect(result.content.length).toBe(0)
       })
-    })
+    }
   })
   describe('invalid cases', () => {
-    invalidCases.forEach(({ name, content, expectedErrors, expectedMessage }) => {
+    for (const { name, content, expectedErrors, expectedMessage } of invalidCases) {
       test(`${name} should fail`, async () => {
         const result = await runRule(frontmatterVersionsWhitespace, {
           strings: { content },
@@ -177,7 +177,7 @@ describe(frontmatterVersionsWhitespace.names.join(' - '), () => {
           expect(result.content[0].errorDetail).toBe(expectedMessage)
         }
       })
-    })
+    }
   })
   describe('fixable errors', () => {
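Loops that generate test cases convert the same way, because `describe`/`test` registration is synchronous: vitest collects every case registered during the loop. A sketch with invented cases (assumes an explicit vitest import):

```ts
import { test, expect } from 'vitest'

const cases = [
  { name: 'empty string', input: '' },
  { name: 'padded word', input: '  hello  ' },
]

// Each pass through the loop registers one parameterized test
for (const { name, input } of cases) {
  test(`${name} never grows when trimmed`, () => {
    expect(input.trim().length).toBeLessThanOrEqual(input.length)
  })
}
```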

View File

@@ -99,10 +99,11 @@ async function selectFromOptions(
   promptFn: (question: string) => Promise<string>,
 ): Promise<string> {
   console.log(chalk.yellow(`\n${message} (${paramName}):`))
-  options.forEach((option, index) => {
+  for (let index = 0; index < options.length; index++) {
+    const option = options[index]
     const letter = String.fromCharCode(97 + index) // 97 is 'a' in ASCII
     console.log(chalk.white(`  ${letter}. ${option}`))
-  })
+  }
   let attempts = 0
   while (true) {
let attempts = 0 let attempts = 0
while (true) { while (true) {
@@ -201,11 +202,11 @@ function validateCTAParams(params: CTAParams): { isValid: boolean; errors: strin
 function buildCTAUrl(baseUrl: string, params: CTAParams): string {
   const url = new URL(baseUrl)
-  Object.entries(params).forEach(([key, value]) => {
+  for (const [key, value] of Object.entries(params)) {
     if (value) {
       url.searchParams.set(key, value)
     }
-  })
+  }
   return url.toString()
 }
@@ -277,11 +278,11 @@ export function convertOldCTAUrl(oldUrl: string): { newUrl: string; notes: strin
     newUrl.searchParams.delete('ref_page')
     // Add new CTA parameters
-    Object.entries(newParams).forEach(([key, value]) => {
+    for (const [key, value] of Object.entries(newParams)) {
       if (value) {
         newUrl.searchParams.set(key, value)
       }
-    })
+    }
     // The URL constructor may add a slash before the question mark in
     // "github.com?foo", but we don't want that. First, check if original
@@ -417,7 +418,9 @@ async function interactiveBuilder(): Promise<void> {
   if (!validation.isValid) {
     console.log(chalk.red('\n❌ Validation Errors:'))
-    validation.errors.forEach((error) => console.log(chalk.red(`${error}`)))
+    for (const error of validation.errors) {
+      console.log(chalk.red(`${error}`))
+    }
     rl.close()
     return
   }
@@ -428,11 +431,11 @@ async function interactiveBuilder(): Promise<void> {
   console.log(chalk.green('\n✅ CTA URL generated successfully!'))
   console.log(chalk.white.bold('\nParameters summary:'))
-  Object.entries(params).forEach(([key, value]) => {
+  for (const [key, value] of Object.entries(params)) {
     if (value) {
       console.log(chalk.white(`  ${key}: ${value}`))
     }
-  })
+  }
   console.log(chalk.white.bold('\nYour CTA URL:'))
   console.log(chalk.cyan(ctaUrl))
@@ -474,7 +477,9 @@ async function convertUrls(options: { url?: string; quiet?: boolean }): Promise<
       if (!validation.isValid) {
         console.log(chalk.red('\n❌ Validation errors in converted URL:'))
-        validation.errors.forEach((message) => console.log(chalk.red(`${message}`)))
+        for (const message of validation.errors) {
+          console.log(chalk.red(`${message}`))
+        }
       }
     } catch (validationError) {
       console.log(chalk.red(`\n❌ Failed to validate new URL: ${validationError}`))
@@ -482,7 +487,9 @@ async function convertUrls(options: { url?: string; quiet?: boolean }): Promise<
     if (result.notes.length) {
       console.log(chalk.white('\n👉 Notes:'))
-      result.notes.forEach((note) => console.log(`  ${note}`))
+      for (const note of result.notes) {
+        console.log(`  ${note}`)
+      }
     }
   } else {
     if (!options.quiet) {
@@ -534,12 +541,14 @@ async function validateUrl(options: { url?: string }): Promise<void> {
   if (validation.isValid) {
     console.log(chalk.green('\n✅ URL is valid'))
     console.log(chalk.white('\nCTA parameters found:'))
-    Object.entries(ctaParams).forEach(([key, value]) => {
+    for (const [key, value] of Object.entries(ctaParams)) {
       console.log(chalk.white(`  ${key}: ${value}`))
-    })
+    }
   } else {
     console.log(chalk.red('\n❌ Validation errors:'))
-    validation.errors.forEach((message) => console.log(chalk.red(`${message}`)))
+    for (const message of validation.errors) {
+      console.log(chalk.red(`${message}`))
+    }
     console.log(
       chalk.yellow(
         '\n💡 Try: npm run cta-builder -- convert --url "your-url" to auto-fix old format URLs',
@@ -596,9 +605,9 @@ async function buildProgrammaticCTA(options: {
   const validation = validateCTAParams(params)
   if (!validation.isValid) {
     // Output validation errors to stderr and exit with error code
-    validation.errors.forEach((error) => {
+    for (const error of validation.errors) {
       console.error(`Validation error: ${error}`)
-    })
+    }
     process.exit(1)
   }
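A usage sketch of the `buildCTAUrl` pattern above — iterating `Object.entries` and setting only truthy params (hypothetical values; note `URLSearchParams` serializes spaces as `+`):

```ts
const url = new URL('https://github.com/signup')
const params: Record<string, string | undefined> = {
  ref_cta: 'Sign up',
  ref_loc: undefined, // falsy values are skipped
}

for (const [key, value] of Object.entries(params)) {
  if (value) url.searchParams.set(key, value)
}

console.log(url.toString()) // https://github.com/signup?ref_cta=Sign+up
```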

View File

@@ -370,7 +370,9 @@ function removeFromChildren(oldPath: string, opts: MoveOptions): PositionInfo {
   const childGroupPositions: number[][] = []
-  ;((data && data[CHILDGROUPS_KEY]) || []).forEach((group: any, i: number) => {
+  const childGroups = (data && data[CHILDGROUPS_KEY]) || []
+  for (let i = 0; i < childGroups.length; i++) {
+    const group = childGroups[i]
     if (group.children) {
       group.children = group.children.filter((entry: any, j: number) => {
         if (entry === oldName || entry === `/${oldName}`) {
@@ -380,7 +382,7 @@ function removeFromChildren(oldPath: string, opts: MoveOptions): PositionInfo {
         return true
       })
     }
-  })
+  }
   if (data) {
     fs.writeFileSync(
@@ -449,10 +451,11 @@ function moveFiles(files: FileTuple[], opts: MoveOptions) {
   for (const [oldPath] of files) {
     const fileContent = fs.readFileSync(oldPath, 'utf-8')
     const { errors } = fm(fileContent, { filepath: oldPath })
-    errors.forEach((error, i) => {
+    for (let i = 0; i < errors.length; i++) {
+      const error = errors[i]
       if (!i) console.warn(chalk.yellow(`Error parsing file (${oldPath}) frontmatter:`))
       console.error(`${chalk.red(error.message)}: ${chalk.yellow(error.reason)}`)
-    })
+    }
     if (errors.length > 0) throw new Error('There were more than 0 parse errors')
   }
@@ -668,12 +671,13 @@ function changeFeaturedLinks(oldHref: string, newHref: string): void {
     if (key === 'popularHeading') {
       continue
     }
-    entries.forEach((entry, i) => {
+    for (let i = 0; i < entries.length; i++) {
+      const entry = entries[i]
       if (regex.test(entry)) {
         entries[i] = entry.replace(regex, `${newHref}$1`)
         changed = true
       }
-    })
+    }
   }
   if (changed) {

View File

@@ -133,11 +133,11 @@ describe('renderContent', () => {
     const html = await renderContent(template)
     const $ = cheerio.load(html, { xmlMode: true })
-    ;[1, 2, 3, 4, 5].forEach((level) => {
+    for (const level of [1, 2, 3, 4, 5]) {
       expect(
         $(`h${level}#this-is-a-level-${level} a[href="#this-is-a-level-${level}"]`).length,
       ).toBe(1)
-    })
+    }
   })
   test('does syntax highlighting', async () => {

View File

@@ -54,7 +54,7 @@ export default function dataDirectory(
     fs.readFileSync(filename, 'utf8'),
   ])
-  files.forEach(([filename, fileContent]) => {
+  for (const [filename, fileContent] of files) {
     // derive `foo.bar.baz` object key from `foo/bar/baz.yml` filename
     const key = filenameToKey(path.relative(dir, filename))
     const extension = path.extname(filename).toLowerCase()
@@ -84,7 +84,7 @@ export default function dataDirectory(
         setWith(data, key, matter(processedContent).content, Object)
         break
     }
-  })
+  }
   return data
 }

View File

@@ -72,7 +72,7 @@ const reusablesToMove: string[] = []
 const imagesToMove: string[] = []
 // 2. Add redirects to and update frontmatter in the to-be-migrated early access files BEFORE moving them.
-filesToMigrate.forEach((filepath) => {
+for (const filepath of filesToMigrate) {
   const { content, data } = frontmatter(fs.readFileSync(filepath, 'utf8'))
   const redirectString: string = filepath
     .replace('content/', '/')
@@ -95,12 +95,18 @@ filesToMigrate.forEach((filepath) => {
   variablesToMove.push(...variables)
   reusablesToMove.push(...reusables)
   imagesToMove.push(...images)
-})
+}
 // 3. Move the data files and images.
-Array.from(new Set(variablesToMove)).forEach((varRef) => moveVariable(varRef))
-Array.from(new Set(reusablesToMove)).forEach((varRef) => moveReusable(varRef))
-Array.from(new Set(imagesToMove)).forEach((imageRef) => moveImage(imageRef))
+for (const varRef of Array.from(new Set(variablesToMove))) {
+  moveVariable(varRef)
+}
+for (const varRef of Array.from(new Set(reusablesToMove))) {
+  moveReusable(varRef)
+}
+for (const imageRef of Array.from(new Set(imagesToMove))) {
+  moveImage(imageRef)
+}
 // 4. Move the content files.
 execFileSync('mv', [oldPath, migratePath])
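One detail worth noting in the hunk above: a `Set` is itself iterable, so `for...of` could consume `new Set(...)` directly; the `Array.from(...)` wrapper carried over from the old code is no longer strictly needed. A tiny sketch:

```ts
const variablesToMove = ['product', 'release', 'product']

// Deduplicates and iterates without the Array.from() wrapper
for (const varRef of new Set(variablesToMove)) {
  console.log(varRef) // 'product', then 'release'
}
```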

View File

@@ -68,11 +68,11 @@ const destinationDirsMap: Record<string, string> = destinationDirNames.reduce(
 )
 // Remove all existing early access directories from this repo
-destinationDirNames.forEach((dirName) => {
+for (const dirName of destinationDirNames) {
   const destDir = destinationDirsMap[dirName]
   rimraf.sync(destDir)
   console.log(`- Removed symlink for early access directory '${dirName}' from this repo`)
-})
+}
 // If removing symlinks, just stop here!
 if (unlink) {
@@ -84,8 +84,8 @@ if (unlink) {
 //
 // Move the latest early access source directories into this repo
-destinationDirNames.forEach((dirName) => {
-  if (!earlyAccessLocalRepoDir) return
+for (const dirName of destinationDirNames) {
+  if (!earlyAccessLocalRepoDir) continue
   const sourceDir = path.join(earlyAccessLocalRepoDir, dirName)
   const destDir = destinationDirsMap[dirName]
@@ -93,7 +93,7 @@ destinationDirNames.forEach((dirName) => {
   // If the source directory doesn't exist, skip it
   if (!fs.existsSync(sourceDir)) {
     console.warn(`Early access directory '${dirName}' does not exist. Skipping...`)
-    return
+    continue
   }
   // Create a symbolic link to the directory
@@ -113,4 +113,4 @@
   }
   console.log(`+ Added symlink for early access directory '${dirName}' into this repo`)
-})
+}

View File

@@ -48,7 +48,7 @@ if (earlyAccessPath) {
   // We also need to include any reusable files that are referenced in the selected content files.
   const referencedDataFiles: string[] = []
-  contentFiles.forEach((file) => {
+  for (const file of contentFiles) {
     const contents = fs.readFileSync(file, 'utf8')
     const dataRefs: string[] = contents.match(patterns.dataReference) || []
     const filepaths: string[] = dataRefs
@@ -62,7 +62,7 @@ if (earlyAccessPath) {
       return path.posix.join(process.cwd(), 'data', `${filepath}.md`)
     })
     referencedDataFiles.push(...filepaths)
-  })
+  }
   const dataFiles = allEarlyAccessFiles.filter((file) => {
     return referencedDataFiles.some((f) =>
@@ -74,7 +74,7 @@ if (earlyAccessPath) {
} }
// Update the EA content and data files // Update the EA content and data files
selectedFiles.forEach((file) => { for (const file of selectedFiles) {
const oldContents = fs.readFileSync(file, 'utf8') const oldContents = fs.readFileSync(file, 'utf8')
const dataRefs: string[] = oldContents.match(patterns.dataReference) || [] const dataRefs: string[] = oldContents.match(patterns.dataReference) || []
@@ -83,58 +83,54 @@ selectedFiles.forEach((file) => {
const replacements: Record<string, string> = {} const replacements: Record<string, string> = {}
if (add) { if (add) {
dataRefs // Since we're adding early-access to the path, filter for those that do not already include it
// Since we're adding early-access to the path, filter for those that do not already include it const dataRefsToAdd = dataRefs.filter((ref) => !ref.includes(' early-access.'))
.filter((dataRef) => !dataRef.includes(' early-access.')) for (const dataRef of dataRefsToAdd) {
// Add to the { oldRef: newRef } replacements object // Add to the { oldRef: newRef } replacements object
.forEach((dataRef) => { replacements[dataRef] = dataRef.replace(
replacements[dataRef] = dataRef.replace( /({% (?:data|indented_data_reference) )(.*)/,
/({% (?:data|indented_data_reference) )(.*)/, '$1early-access.$2',
'$1early-access.$2', )
) }
})
imageRefs // Since we're adding early-access to the path, filter for those that do not already include it
// Since we're adding early-access to the path, filter for those that do not already include it const imageRefsToAdd = imageRefs.filter((ref) => !ref.split('/').includes('early-access'))
.filter((imageRef) => !imageRef.split('/').includes('early-access')) for (const imageRef of imageRefsToAdd) {
// Add to the { oldRef: newRef } replacements object // Add to the { oldRef: newRef } replacements object
.forEach((imageRef) => { replacements[imageRef] = imageRef.replace('/assets/images/', '/assets/images/early-access/')
replacements[imageRef] = imageRef.replace('/assets/images/', '/assets/images/early-access/') }
})
} }
if (remove) { if (remove) {
dataRefs // Since we're removing early-access from the path, filter for those that include it
// Since we're removing early-access from the path, filter for those that include it const dataRefsToRemove = dataRefs.filter((ref) => ref.includes(' early-access.'))
.filter((dataRef) => dataRef.includes(' early-access.')) for (const dataRef of dataRefsToRemove) {
// Add to the { oldRef: newRef } replacements object // Add to the { oldRef: newRef } replacements object
.forEach((dataRef) => { replacements[dataRef] = dataRef.replace('early-access.', '').replace('-alt.', '.')
replacements[dataRef] = dataRef.replace('early-access.', '').replace('-alt.', '.') // replacements[dataRef] = dataRef.replace('early-access.', '')
// replacements[dataRef] = dataRef.replace('early-access.', '') }
})
imageRefs // Since we're removing early-access from the path, filter for those that include it
// Since we're removing early-access from the path, filter for those that include it const imageRefsToRemove = imageRefs.filter((ref) => ref.split('/').includes('early-access'))
.filter((imageRef) => imageRef.split('/').includes('early-access')) for (const imageRef of imageRefsToRemove) {
// Add to the { oldRef: newRef } replacements object // Add to the { oldRef: newRef } replacements object
.forEach((imageRef) => { replacements[imageRef] = imageRef.replace('/assets/images/early-access/', '/assets/images/')
replacements[imageRef] = imageRef.replace('/assets/images/early-access/', '/assets/images/') }
})
} }
// Return early if nothing to replace // Return early if nothing to replace
if (!Object.keys(replacements).length) { if (!Object.keys(replacements).length) {
return continue
} }
// Make the replacement in the content // Make the replacement in the content
let newContents = oldContents let newContents = oldContents
Object.entries(replacements).forEach(([oldRef, newRef]) => { for (const [oldRef, newRef] of Object.entries(replacements)) {
newContents = newContents.replace(new RegExp(escapeRegExp(oldRef), 'g'), newRef) newContents = newContents.replace(new RegExp(escapeRegExp(oldRef), 'g'), newRef)
}) }
// Write the updated content // Write the updated content
fs.writeFileSync(file, newContents) fs.writeFileSync(file, newContents)
}) }
console.log('Done! Run "git status" in your docs-early-access checkout to see the changes.\n') console.log('Done! Run "git status" in your docs-early-access checkout to see the changes.\n')
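Chained `.filter(...).forEach(...)` calls, as in the hunks above, can't become a single `for...of` directly, so the conversion names the filtered subset first and then loops over it. A condensed sketch of that reshaping (the ref strings are illustrative):

```ts
const dataRefs = ['{% data early-access.foo %}', '{% data reusables.bar %}']
const replacements: Record<string, string> = {}

// Before: filter and side effect interleaved in one chain.
// dataRefs
//   .filter((dataRef) => dataRef.includes(' early-access.'))
//   .forEach((dataRef) => { replacements[dataRef] = ... })

// After: a named intermediate array, then a plain loop.
const dataRefsToRemove = dataRefs.filter((ref) => ref.includes(' early-access.'))
for (const dataRef of dataRefsToRemove) {
  replacements[dataRef] = dataRef.replace('early-access.', '')
}
```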

View File

@@ -332,11 +332,11 @@ async function waitForPageReady() {
} }
function initClipboardEvent() { function initClipboardEvent() {
;['copy', 'cut', 'paste'].forEach((verb) => { for (const verb of ['copy', 'cut', 'paste']) {
document.documentElement.addEventListener(verb, () => { document.documentElement.addEventListener(verb, () => {
sendEvent({ type: EventType.clipboard, clipboard_operation: verb }) sendEvent({ type: EventType.clipboard, clipboard_operation: verb })
}) })
}) }
} }
function initCopyButtonEvent() { function initCopyButtonEvent() {

View File

@@ -203,12 +203,12 @@ describe('ifversion', () => {
return !matchesPerVersion[version].includes(condition) return !matchesPerVersion[version].includes(condition)
}) })
wantedConditions.forEach((condition: string) => { for (const condition of wantedConditions as string[]) {
expect(html).toMatch(condition) expect(html).toMatch(condition)
}) }
unwantedConditions.forEach((condition: string) => { for (const condition of unwantedConditions as string[]) {
expect(html).not.toMatch(condition) expect(html).not.toMatch(condition)
}) }
}, },
) )
}) })

View File

@@ -23,7 +23,7 @@ const pages: { [key: string]: string } = {
} }
// create a test for each page, will eventually be separated into finer grain tests // create a test for each page, will eventually be separated into finer grain tests
Object.keys(pages).forEach((pageName) => { for (const pageName of Object.keys(pages)) {
test.describe(`${pageName}`, () => { test.describe(`${pageName}`, () => {
test('full page axe scan without experiments', async ({ page }) => { test('full page axe scan without experiments', async ({ page }) => {
await page.goto(pages[pageName]) await page.goto(pages[pageName])
@@ -35,6 +35,7 @@ Object.keys(pages).forEach((pageName) => {
expect(accessibilityScanResults.violations).toEqual([]) expect(accessibilityScanResults.violations).toEqual([])
}) })
}) })
test.describe(`${pageName} (with experiments)`, () => { test.describe(`${pageName} (with experiments)`, () => {
test('full page axe scan with experiments', async ({ page }) => { test('full page axe scan with experiments', async ({ page }) => {
await page.goto(pages[pageName]) await page.goto(pages[pageName])
@@ -46,4 +47,4 @@ Object.keys(pages).forEach((pageName) => {
expect(accessibilityScanResults.violations).toEqual([]) expect(accessibilityScanResults.violations).toEqual([])
}) })
}) })
}) }

View File

@@ -49,9 +49,10 @@ export default function ClientSideHighlightJS() {
} }
} }
}) })
for (const parent of Array.from( const codeElementParents = Array.from(
document.querySelectorAll<HTMLElement>(CODE_ELEMENTS_PARENT_SELECTOR), document.querySelectorAll<HTMLElement>(CODE_ELEMENTS_PARENT_SELECTOR),
)) { )
for (const parent of codeElementParents) {
const language = parent.dataset.highlight || 'json' const language = parent.dataset.highlight || 'json'
if (!SUPPORTED_LANGUAGES.includes(language)) { if (!SUPPORTED_LANGUAGES.includes(language)) {
if (process.env.NODE_ENV === 'development') { if (process.env.NODE_ENV === 'development') {

View File

@@ -55,11 +55,11 @@ export const UtmPreserver = () => {
const applyUtmToLinks = (): void => { const applyUtmToLinks = (): void => {
const links = document.querySelectorAll<HTMLAnchorElement>('a[href]') const links = document.querySelectorAll<HTMLAnchorElement>('a[href]')
links.forEach((link) => { for (const link of links) {
if (link.href && shouldPreserveUtm(link.href)) { if (link.href && shouldPreserveUtm(link.href)) {
link.href = addUtmParamsToUrl(link.href, utmParams) link.href = addUtmParamsToUrl(link.href, utmParams)
} }
}) }
} }
// Handle click events for dynamic link modification // Handle click events for dynamic link modification
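Note that `querySelectorAll` returns a `NodeList`, not an array; the conversion works because `NodeList` is itself iterable in modern browsers, so `for...of` needs no `Array.from` wrapper. A browser-only sketch (the `rel` tweak is just an illustrative mutation, not the UTM logic above):

```ts
// NodeListOf<HTMLAnchorElement> is iterable, so for...of works directly.
const links = document.querySelectorAll<HTMLAnchorElement>('a[href]')
for (const link of links) {
  link.rel = 'noopener' // illustrative mutation only
}
```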

View File

@@ -5,7 +5,7 @@ export default function copyCode() {
if (!buttons) return if (!buttons) return
buttons.forEach((button) => for (const button of buttons) {
button.addEventListener('click', async () => { button.addEventListener('click', async () => {
const codeId = (button as HTMLElement).dataset.clipboard const codeId = (button as HTMLElement).dataset.clipboard
if (!codeId) return if (!codeId) return
@@ -22,6 +22,6 @@ export default function copyCode() {
setTimeout(() => { setTimeout(() => {
button.classList.remove('copied') button.classList.remove('copied')
}, 2000) }, 2000)
}), })
) }
} }

View File

@@ -51,7 +51,7 @@ export default function toggleAnnotation() {
function setActive(annotationButtons: Array<Element>, targetMode?: string) { function setActive(annotationButtons: Array<Element>, targetMode?: string) {
const activeElements: Array<Element> = [] const activeElements: Array<Element> = []
targetMode = validateMode(targetMode) targetMode = validateMode(targetMode)
annotationButtons.forEach((el) => { for (const el of annotationButtons) {
if (el.getAttribute('value') === targetMode) { if (el.getAttribute('value') === targetMode) {
el.ariaCurrent = 'true' el.ariaCurrent = 'true'
el.classList.add('selected') el.classList.add('selected')
@@ -60,7 +60,7 @@ function setActive(annotationButtons: Array<Element>, targetMode?: string) {
el.removeAttribute('aria-current') el.removeAttribute('aria-current')
el.classList.remove('selected') el.classList.remove('selected')
} }
}) }
if (!activeElements.length) if (!activeElements.length)
throw new Error('No annotationBtn item is active for code annotation.') throw new Error('No annotationBtn item is active for code annotation.')
@@ -70,15 +70,15 @@ function setActive(annotationButtons: Array<Element>, targetMode?: string) {
// displays the chosen annotation mode // displays the chosen annotation mode
function displayAnnotationMode(annotationBtnItems: Array<Element>, targetMode?: string) { function displayAnnotationMode(annotationBtnItems: Array<Element>, targetMode?: string) {
if (!targetMode || targetMode === annotationMode.Beside) if (!targetMode || targetMode === annotationMode.Beside) {
annotationBtnItems.forEach((el) => { for (const el of annotationBtnItems) {
el.closest('.annotate')?.classList.replace('inline', 'beside') el.closest('.annotate')?.classList.replace('inline', 'beside')
}) }
else if (targetMode === annotationMode.Inline) } else if (targetMode === annotationMode.Inline) {
annotationBtnItems.forEach((el) => { for (const el of annotationBtnItems) {
el.closest('.annotate')?.classList.replace('beside', 'inline') el.closest('.annotate')?.classList.replace('beside', 'inline')
}) }
else throw new Error('Invalid target mode set for annotation.') } else throw new Error('Invalid target mode set for annotation.')
setActive(annotationBtnItems, targetMode) setActive(annotationBtnItems, targetMode)
} }

View File

@@ -11,7 +11,7 @@ export default function wrapCodeTerms() {
const codeTerms = document.querySelectorAll('#article-contents table code') const codeTerms = document.querySelectorAll('#article-contents table code')
if (!codeTerms) return if (!codeTerms) return
codeTerms.forEach((node) => { for (const node of codeTerms) {
// Do the wrapping on the inner text only. With anchor element children // Do the wrapping on the inner text only. With anchor element children
// we'll only handle the case where the code term only has a single child // we'll only handle the case where the code term only has a single child
// and that child is an anchor element. // and that child is an anchor element.
@@ -37,5 +37,5 @@ export default function wrapCodeTerms() {
} else { } else {
node.innerHTML = node.innerHTML.replace(oldText, newText) node.innerHTML = node.innerHTML.replace(oldText, newText)
} }
}) }
} }

View File

@@ -169,7 +169,9 @@ async function getMtime(filePath: string): Promise<number> {
function assertUniqueChildren(page: any): void { function assertUniqueChildren(page: any): void {
if (page.children.length !== new Set(page.children).size) { if (page.children.length !== new Set(page.children).size) {
const count: Record<string, number> = {} const count: Record<string, number> = {}
page.children.forEach((entry: string) => (count[entry] = 1 + (count[entry] || 0))) for (const entry of page.children) {
count[entry] = 1 + (count[entry] || 0)
}
let msg = `${page.relativePath} has duplicates in the 'children' key.` let msg = `${page.relativePath} has duplicates in the 'children' key.`
for (const [entry, times] of Object.entries(count)) { for (const [entry, times] of Object.entries(count)) {
if (times > 1) msg += ` '${entry}' is repeated ${times} times. ` if (times > 1) msg += ` '${entry}' is repeated ${times} times. `
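The duplicate-counting callback above also gains a body: the single-expression arrow `(entry) => (count[entry] = 1 + (count[entry] || 0))` becomes a two-line loop, which reads more naturally for a side-effecting tally. Sketch:

```ts
const children = ['intro', 'setup', 'intro']
const count: Record<string, number> = {}
for (const entry of children) {
  count[entry] = 1 + (count[entry] || 0)
}
// count is now { intro: 2, setup: 1 }
```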

View File

@@ -170,14 +170,16 @@ class Page {
// where as notations like `__GHES_DEPRECATED__[3]` // where as notations like `__GHES_DEPRECATED__[3]`
// or `__GHES_SUPPORTED__[0]` are static. // or `__GHES_SUPPORTED__[0]` are static.
if (opts.basePath.split(path.sep).includes('fixtures')) { if (opts.basePath.split(path.sep).includes('fixtures')) {
supported.forEach((version: string, i: number, arr: string[]) => { for (let i = 0; i < supported.length; i++) {
const version: string = supported[i]
markdown = markdown.replaceAll(`__GHES_SUPPORTED__[${i}]`, version) markdown = markdown.replaceAll(`__GHES_SUPPORTED__[${i}]`, version)
markdown = markdown.replaceAll(`__GHES_SUPPORTED__[-${arr.length - i}]`, version) markdown = markdown.replaceAll(`__GHES_SUPPORTED__[-${supported.length - i}]`, version)
}) }
deprecated.forEach((version: string, i: number, arr: string[]) => { for (let i = 0; i < deprecated.length; i++) {
const version: string = deprecated[i]
markdown = markdown.replaceAll(`__GHES_DEPRECATED__[${i}]`, version) markdown = markdown.replaceAll(`__GHES_DEPRECATED__[${i}]`, version)
markdown = markdown.replaceAll(`__GHES_DEPRECATED__[-${arr.length - i}]`, version) markdown = markdown.replaceAll(`__GHES_DEPRECATED__[-${deprecated.length - i}]`, version)
}) }
} }
return { return {
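Callbacks that used `forEach`'s extra parameters, like `(version, i, arr)` above, have no direct `for...of` equivalent, so they fall back to a classic index loop with the named array replacing the `arr` parameter. A sketch with placeholder version strings:

```ts
let markdown = 'oldest: __GHES_SUPPORTED__[0], newest: __GHES_SUPPORTED__[-1]'
const supported = ['3.10', '3.11', '3.12']

for (let i = 0; i < supported.length; i++) {
  const version = supported[i]
  markdown = markdown.replaceAll(`__GHES_SUPPORTED__[${i}]`, version)
  // Negative notation counts from the end: at the last index,
  // supported.length - i === 1, so [-1] maps to the newest version.
  markdown = markdown.replaceAll(`__GHES_SUPPORTED__[-${supported.length - i}]`, version)
}
// markdown is now 'oldest: 3.10, newest: 3.12'
```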

View File

@@ -48,7 +48,9 @@ function getBreadcrumbs(req: ExtendedRequest, isEarlyAccess: boolean) {
req.context.currentPath, req.context.currentPath,
req.context.currentProductTreeTitles, req.context.currentProductTreeTitles,
) )
;[...Array(cutoff)].forEach(() => breadcrumbsResult.shift()) for (let i = 0; i < cutoff; i++) {
breadcrumbsResult.shift()
}
return breadcrumbsResult return breadcrumbsResult
} }
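The `[...Array(cutoff)].forEach(...)` idiom above existed purely to run `shift()` a fixed number of times; the counted loop states that intent directly and skips the throwaway array. Sketch:

```ts
const breadcrumbsResult = ['Home', 'Product', 'Guides', 'Intro']
const cutoff = 2

for (let i = 0; i < cutoff; i++) {
  breadcrumbsResult.shift()
}
// breadcrumbsResult is now ['Guides', 'Intro']
```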

View File

@@ -27,9 +27,9 @@ export default async function glossaries(req: ExtendedRequest, res: Response, ne
if (req.context.currentLanguage !== 'en') { if (req.context.currentLanguage !== 'en') {
const enGlossariesRaw: Glossary[] = getDataByLanguage('glossaries.external', 'en') const enGlossariesRaw: Glossary[] = getDataByLanguage('glossaries.external', 'en')
enGlossariesRaw.forEach(({ term, description }) => { for (const { term, description } of enGlossariesRaw) {
enGlossaryMap.set(term, description) enGlossaryMap.set(term, description)
}) }
} }
// The glossaries Yaml file contains descriptions that might contain // The glossaries Yaml file contains descriptions that might contain

View File

@@ -172,9 +172,8 @@ MyApp.getInitialProps = async (appContext: AppContext) => {
// Note, `req` will be undefined if this is the client-side rendering // Note, `req` will be undefined if this is the client-side rendering
// of a 500 page ("Ooops! It looks like something went wrong.") // of a 500 page ("Ooops! It looks like something went wrong.")
if (req?.context?.languages) { if (req?.context?.languages) {
for (const [langCode, langObj] of Object.entries( const languageEntries = Object.entries(req.context.languages as Record<string, LanguageItem>)
req.context.languages as Record<string, LanguageItem>, for (const [langCode, langObj] of languageEntries) {
)) {
// Only pick out the keys we actually need // Only pick out the keys we actually need
languagesContext.languages[langCode] = { languagesContext.languages[langCode] = {
name: langObj.name, name: langObj.name,

View File

@@ -20,7 +20,7 @@ describe('block robots', () => {
.filter((product) => product.wip) .filter((product) => product.wip)
.map((product) => product.id) .map((product) => product.id)
wipProductIds.forEach((id) => { for (const id of wipProductIds) {
const { href } = productMap[id] const { href } = productMap[id]
const blockedPaths = [ const blockedPaths = [
`/en${href}`, `/en${href}`,
@@ -30,10 +30,10 @@ describe('block robots', () => {
`/en/enterprise/${enterpriseServerReleases.oldestSupported}/user${href}`, `/en/enterprise/${enterpriseServerReleases.oldestSupported}/user${href}`,
] ]
blockedPaths.forEach((path) => { for (const path of blockedPaths) {
expect(allowIndex(path)).toBe(false) expect(allowIndex(path)).toBe(false)
}) }
}) }
}) })
test('disallows crawling of early access "hidden" products', async () => { test('disallows crawling of early access "hidden" products', async () => {
@@ -41,19 +41,19 @@ describe('block robots', () => {
.filter((product) => product.hidden) .filter((product) => product.hidden)
.map((product) => product.id) .map((product) => product.id)
hiddenProductIds.forEach((id) => { for (const id of hiddenProductIds) {
const { versions } = productMap[id] const { versions } = productMap[id]
if (!versions) return if (!versions) continue
const blockedPaths = versions const blockedPaths = versions
.map((version) => { .map((version) => {
return [`/en/${version}/${id}`, `/en/${version}/${id}/some-early-access-article`] return [`/en/${version}/${id}`, `/en/${version}/${id}/some-early-access-article`]
}) })
.flat() .flat()
blockedPaths.forEach((path) => { for (const path of blockedPaths) {
expect(allowIndex(path)).toBe(false) expect(allowIndex(path)).toBe(false)
}) }
}) }
}) })
test('allows crawling of non-WIP products', async () => { test('allows crawling of non-WIP products', async () => {
@@ -68,7 +68,7 @@ describe('block robots', () => {
}) })
test('disallows crawling of deprecated enterprise releases', async () => { test('disallows crawling of deprecated enterprise releases', async () => {
enterpriseServerReleases.deprecated.forEach((version) => { for (const version of enterpriseServerReleases.deprecated) {
const blockedPaths = [ const blockedPaths = [
`/en/enterprise-server@${version}/actions`, `/en/enterprise-server@${version}/actions`,
`/en/enterprise/${version}/actions`, `/en/enterprise/${version}/actions`,
@@ -76,9 +76,9 @@ describe('block robots', () => {
`/en/enterprise/${version}/actions/overview`, `/en/enterprise/${version}/actions/overview`,
] ]
blockedPaths.forEach((path) => { for (const path of blockedPaths) {
expect(allowIndex(path)).toBe(false) expect(allowIndex(path)).toBe(false)
}) }
}) }
}) })
}) })

View File

@@ -59,18 +59,18 @@ describe('pages module', () => {
const versionedRedirects: Array<{ path: string; file: string }> = [] const versionedRedirects: Array<{ path: string; file: string }> = []
// Page objects have dynamic properties from chain/lodash that aren't fully typed // Page objects have dynamic properties from chain/lodash that aren't fully typed
englishPages.forEach((page: any) => { for (const page of englishPages) {
page.redirect_from.forEach((redirect: string) => { for (const redirect of (page as any).redirect_from) {
page.applicableVersions.forEach((version: string) => { for (const version of (page as any).applicableVersions) {
const versioned = removeFPTFromPath(path.posix.join('/', version, redirect)) const versioned = removeFPTFromPath(path.posix.join('/', version, redirect))
versionedRedirects.push({ path: versioned, file: page.fullPath }) versionedRedirects.push({ path: versioned, file: (page as any).fullPath })
if (!redirectToFiles.has(versioned)) { if (!redirectToFiles.has(versioned)) {
redirectToFiles.set(versioned, new Set<string>()) redirectToFiles.set(versioned, new Set<string>())
} }
redirectToFiles.get(versioned)!.add(page.fullPath) redirectToFiles.get(versioned)!.add((page as any).fullPath)
}) }
}) }
}) }
// Only consider as duplicate if more than one unique file defines the same redirect // Only consider as duplicate if more than one unique file defines the same redirect
const duplicates = Array.from(redirectToFiles.entries()) const duplicates = Array.from(redirectToFiles.entries())

View File

@@ -166,10 +166,10 @@ describe('server', () => {
const categories = JSON.parse(res.body) const categories = JSON.parse(res.body)
expect(Array.isArray(categories)).toBe(true) expect(Array.isArray(categories)).toBe(true)
expect(categories.length).toBeGreaterThan(1) expect(categories.length).toBeGreaterThan(1)
categories.forEach((category: Category) => { for (const category of categories as Category[]) {
expect('name' in category).toBe(true) expect('name' in category).toBe(true)
expect('published_articles' in category).toBe(true) expect('published_articles' in category).toBe(true)
}) }
}) })
describeViaActionsOnly('Early Access articles', () => { describeViaActionsOnly('Early Access articles', () => {

View File

@@ -61,7 +61,7 @@ describe('siteTree', () => {
function validate(currentPage: Tree): void { function validate(currentPage: Tree): void {
const childPages: Tree[] = currentPage.childPages || [] const childPages: Tree[] = currentPage.childPages || []
childPages.forEach((childPage) => { for (const childPage of childPages) {
// Store page reference before validation to avoid type narrowing // Store page reference before validation to avoid type narrowing
const pageRef: Tree = childPage const pageRef: Tree = childPage
const isValid = siteTreeValidate(childPage) const isValid = siteTreeValidate(childPage)
@@ -76,5 +76,5 @@ function validate(currentPage: Tree): void {
// Run recursively until we run out of child pages // Run recursively until we run out of child pages
validate(pageRef) validate(pageRef)
}) }
} }

View File

@@ -142,7 +142,7 @@ async function createRedirectsFile(pageList: PageList, outputDirectory: string)
const redirectEntries: Array<[string, string]> = Object.entries(redirects) const redirectEntries: Array<[string, string]> = Object.entries(redirects)
redirectEntries.forEach(([oldPath, newPath]) => { for (let [oldPath, newPath] of redirectEntries) {
// remove any liquid variables that sneak in // remove any liquid variables that sneak in
oldPath = oldPath.replace('/{{ page.version }}', '').replace('/{{ currentVersion }}', '') oldPath = oldPath.replace('/{{ page.version }}', '').replace('/{{ currentVersion }}', '')
// ignore any old paths that are not in this version // ignore any old paths that are not in this version
@@ -152,10 +152,10 @@ async function createRedirectsFile(pageList: PageList, outputDirectory: string)
oldPath.includes(`/enterprise/${version}`) oldPath.includes(`/enterprise/${version}`)
) )
) )
return continue
redirectsPerVersion[oldPath] = newPath redirectsPerVersion[oldPath] = newPath
}) }
fs.writeFileSync( fs.writeFileSync(
path.join(outputDirectory, 'redirects.json'), path.join(outputDirectory, 'redirects.json'),

View File

@@ -102,5 +102,7 @@ function updateFeatureData() {
} }
console.log('Feature files with all versions: ') console.log('Feature files with all versions: ')
allFeatureFiles.forEach((file) => console.log(file)) for (const file of allFeatureFiles) {
console.log(file)
}
} }

View File

@@ -54,7 +54,7 @@ async function main(): Promise<void> {
} }
const formattedDates: EnterpriseDates = {} const formattedDates: EnterpriseDates = {}
Object.entries(rawDates).forEach(([releaseNumber, releaseObject]) => { for (const [releaseNumber, releaseObject] of Object.entries(rawDates)) {
formattedDates[releaseNumber] = { formattedDates[releaseNumber] = {
// For backward compatibility, keep releaseDate as RC date initially, then GA date once available // For backward compatibility, keep releaseDate as RC date initially, then GA date once available
releaseDate: releaseObject.release_candidate || releaseObject.start, releaseDate: releaseObject.release_candidate || releaseObject.start,
@@ -62,7 +62,7 @@ async function main(): Promise<void> {
releaseCandidateDate: releaseObject.release_candidate, releaseCandidateDate: releaseObject.release_candidate,
generalAvailabilityDate: releaseObject.start, generalAvailabilityDate: releaseObject.start,
} }
}) }
const formattedDatesString = JSON.stringify(formattedDates, null, 2) const formattedDatesString = JSON.stringify(formattedDates, null, 2)
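`Object.entries(...).forEach(([k, v]) => ...)` converts especially cleanly, since `for...of` supports the same array destructuring in its loop binding. A sketch with made-up release data:

```ts
interface ReleaseDates {
  start: string
  release_candidate?: string
}

const rawDates: Record<string, ReleaseDates> = {
  '3.12': { start: '2024-01-30', release_candidate: '2024-01-16' },
}
const formattedDates: Record<string, { releaseDate: string }> = {}

for (const [releaseNumber, releaseObject] of Object.entries(rawDates)) {
  formattedDates[releaseNumber] = {
    // RC date when present, GA date otherwise, mirroring the hunk above.
    releaseDate: releaseObject.release_candidate || releaseObject.start,
  }
}
```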

View File

@@ -378,10 +378,10 @@ function getDisplayPermissions(
): Array<Record<string, string>> { ): Array<Record<string, string>> {
const displayPermissions = permissionSets.map((permissionSet) => { const displayPermissions = permissionSets.map((permissionSet) => {
const displayPermissionSet: Record<string, string> = {} const displayPermissionSet: Record<string, string> = {}
Object.entries(permissionSet).forEach(([key, value]) => { for (const [key, value] of Object.entries(permissionSet)) {
const { displayTitle } = getDisplayTitle(key, progActorResources, true) const { displayTitle } = getDisplayTitle(key, progActorResources, true)
displayPermissionSet[displayTitle] = value displayPermissionSet[displayTitle] = value
}) }
return displayPermissionSet return displayPermissionSet
}) })
@@ -592,9 +592,9 @@ async function getProgActorResourceContent({
if (Object.keys(fileContent).length !== 1) { if (Object.keys(fileContent).length !== 1) {
throw new Error(`Error: The file ${JSON.stringify(fileContent)} must only have one key.`) throw new Error(`Error: The file ${JSON.stringify(fileContent)} must only have one key.`)
} }
Object.entries(fileContent).forEach(([key, value]) => { for (const [key, value] of Object.entries(fileContent)) {
progActorResources[key] = value progActorResources[key] = value
}) }
} }
return progActorResources return progActorResources
} }

View File

@@ -19,9 +19,9 @@ const changelog = new Map<string, any>()
const graphqlSchema = new Map<string, any>() const graphqlSchema = new Map<string, any>()
const miniTocs = new Map<string, Map<string, Map<string, any[]>>>() const miniTocs = new Map<string, Map<string, Map<string, any[]>>>()
Object.keys(languages).forEach((language) => { for (const language of Object.keys(languages)) {
miniTocs.set(language, new Map()) miniTocs.set(language, new Map())
}) }
// Using any for return type as the GraphQL schema structure is complex and dynamically loaded from JSON // Using any for return type as the GraphQL schema structure is complex and dynamically loaded from JSON
export function getGraphqlSchema(version: string, type: string): any { export function getGraphqlSchema(version: string, type: string): any {

View File

@@ -56,16 +56,16 @@ export const getServerSideProps: GetServerSideProps<Props> = async (context) =>
// <li>Field filename was added to object type <code>IssueTemplate</code></li> // <li>Field filename was added to object type <code>IssueTemplate</code></li>
// //
// ...without the additional <p>. // ...without the additional <p>.
schema.forEach((item) => { for (const item of schema) {
for (const group of [item.schemaChanges, item.previewChanges, item.upcomingChanges]) { for (const group of [item.schemaChanges, item.previewChanges, item.upcomingChanges]) {
group.forEach((change) => { for (const change of group) {
change.changes = change.changes.map((html) => { change.changes = change.changes.map((html) => {
if (html.startsWith('<p>') && html.endsWith('</p>')) return html.slice(3, -4) if (html.startsWith('<p>') && html.endsWith('</p>')) return html.slice(3, -4)
return html return html
}) })
}) }
} }
}) }
return { return {
props: { props: {

View File

@@ -97,14 +97,14 @@ export async function createChangelogEntry(
const changes = await diff(oldSchema, newSchema) const changes = await diff(oldSchema, newSchema)
const changesToReport: Change[] = [] const changesToReport: Change[] = []
const ignoredChanges: Change[] = [] const ignoredChanges: Change[] = []
changes.forEach((change) => { for (const change of changes) {
if (CHANGES_TO_REPORT.includes(change.type)) { if (CHANGES_TO_REPORT.includes(change.type)) {
changesToReport.push(change) changesToReport.push(change)
} else { } else {
// Track ignored changes for visibility // Track ignored changes for visibility
ignoredChanges.push(change) ignoredChanges.push(change)
} }
}) }
// Log warnings for ignored change types to provide visibility // Log warnings for ignored change types to provide visibility
if (ignoredChanges.length > 0) { if (ignoredChanges.length > 0) {
@@ -112,10 +112,10 @@ export async function createChangelogEntry(
console.warn( console.warn(
`⚠️ GraphQL changelog: Ignoring ${ignoredChanges.length} changes of ${ignoredTypes.length} type(s):`, `⚠️ GraphQL changelog: Ignoring ${ignoredChanges.length} changes of ${ignoredTypes.length} type(s):`,
) )
ignoredTypes.forEach((type) => { for (const type of ignoredTypes) {
const count = ignoredChanges.filter((change) => change.type === type).length const count = ignoredChanges.filter((change) => change.type === type).length
console.warn(` - ${type} (${count} change${count > 1 ? 's' : ''})`) console.warn(` - ${type} (${count} change${count > 1 ? 's' : ''})`)
}) }
console.warn( console.warn(
' These change types are not in CHANGES_TO_REPORT and will not appear in the changelog.', ' These change types are not in CHANGES_TO_REPORT and will not appear in the changelog.',
) )
@@ -257,15 +257,15 @@ export function segmentPreviewChanges(
// Build a map of `{ path => previewTitle }` // Build a map of `{ path => previewTitle }`
// for easier lookup of change to preview // for easier lookup of change to preview
const pathToPreview: Record<string, string> = {} const pathToPreview: Record<string, string> = {}
previews.forEach(function (preview): void { for (const preview of previews) {
preview.toggled_on.forEach(function (path) { for (const path of preview.toggled_on) {
pathToPreview[path] = preview.title pathToPreview[path] = preview.title
}) }
}) }
const schemaChanges: Change[] = [] const schemaChanges: Change[] = []
const changesByPreview: Record<string, PreviewChanges> = {} const changesByPreview: Record<string, PreviewChanges> = {}
changesToReport.forEach(function (change): void { for (const change of changesToReport) {
// For each change, see if its path _or_ one of its ancestors // For each change, see if its path _or_ one of its ancestors
// is covered by a preview. If it is, mark this change as belonging to a preview // is covered by a preview. If it is, mark this change as belonging to a preview
const pathParts = change.path?.split('.') || [] const pathParts = change.path?.split('.') || []
@@ -290,7 +290,7 @@ export function segmentPreviewChanges(
} else { } else {
schemaChanges.push(change) schemaChanges.push(change)
} }
}) }
return { schemaChangesToReport: schemaChanges, previewChangesToReport: changesByPreview } return { schemaChangesToReport: schemaChanges, previewChangesToReport: changesByPreview }
} }

View File

@@ -21,7 +21,7 @@ export default function processPreviews(previews: RawPreview[]): ProcessedPrevie
// clean up raw yml data // clean up raw yml data
// Using any type because we're mutating the preview object to add new properties // Using any type because we're mutating the preview object to add new properties
// that don't exist in the RawPreview interface (accept_header, href) // that don't exist in the RawPreview interface (accept_header, href)
previews.forEach((preview: any) => { for (const preview of previews as any[]) {
preview.title = sentenceCase(preview.title) preview.title = sentenceCase(preview.title)
.replace(/ -.+/, '') // remove any extra info that follows a hyphen .replace(/ -.+/, '') // remove any extra info that follows a hyphen
.replace('it hub', 'itHub') // fix overcorrected `git hub` from sentenceCasing .replace('it hub', 'itHub') // fix overcorrected `git hub` from sentenceCasing
@@ -46,7 +46,7 @@ export default function processPreviews(previews: RawPreview[]): ProcessedPrevie
slugger.reset() slugger.reset()
preview.href = `/graphql/overview/schema-previews#${slugger.slug(preview.title)}` preview.href = `/graphql/overview/schema-previews#${slugger.slug(preview.title)}`
}) }
return previews as ProcessedPreview[] return previews as ProcessedPreview[]
} }

View File

@@ -23,16 +23,16 @@ describe('graphql json files', () => {
// so use a cache of which we've already validated to speed this // so use a cache of which we've already validated to speed this
// test up significantly. // test up significantly.
const typeObjsTested = new Set<string>() const typeObjsTested = new Set<string>()
graphqlVersions.forEach((version) => { for (const version of graphqlVersions) {
const schemaJsonPerVersion = readJsonFile( const schemaJsonPerVersion = readJsonFile(
`${GRAPHQL_DATA_DIR}/${version}/schema.json`, `${GRAPHQL_DATA_DIR}/${version}/schema.json`,
) as Record<string, Array<{ kind: string; name: string }>> ) as Record<string, Array<{ kind: string; name: string }>>
// all graphql types are arrays except for queries // all graphql types are arrays except for queries
graphqlTypes.forEach((type) => { for (const type of graphqlTypes) {
test(`${version} schemas object validation for ${type}`, () => { test(`${version} schemas object validation for ${type}`, () => {
schemaJsonPerVersion[type].forEach((typeObj) => { for (const typeObj of schemaJsonPerVersion[type]) {
const key = JSON.stringify(typeObj) + type const key = JSON.stringify(typeObj) + type
if (typeObjsTested.has(key)) return if (typeObjsTested.has(key)) continue
typeObjsTested.add(key) typeObjsTested.add(key)
const { isValid, errors } = validateJson( const { isValid, errors } = validateJson(
@@ -48,15 +48,15 @@ describe('graphql json files', () => {
} }
expect(isValid, formattedErrors).toBe(true) expect(isValid, formattedErrors).toBe(true)
}) }
}) })
}) }
}) }
test('previews object validation', () => { test('previews object validation', () => {
graphqlVersions.forEach((version) => { for (const version of graphqlVersions) {
const previews = readJsonFile(`${GRAPHQL_DATA_DIR}/${version}/previews.json`) as Array<any> // GraphQL preview schema structure is dynamic const previews = readJsonFile(`${GRAPHQL_DATA_DIR}/${version}/previews.json`) as Array<any> // GraphQL preview schema structure is dynamic
previews.forEach((preview) => { for (const preview of previews) {
const isValid = previewsValidate(preview) const isValid = previewsValidate(preview)
let errors: string | undefined let errors: string | undefined
@@ -65,18 +65,18 @@ describe('graphql json files', () => {
} }
expect(isValid, errors).toBe(true) expect(isValid, errors).toBe(true)
}) }
}) }
}) })
test('upcoming changes object validation', () => { test('upcoming changes object validation', () => {
graphqlVersions.forEach((version) => { for (const version of graphqlVersions) {
const upcomingChanges = readJsonFile( const upcomingChanges = readJsonFile(
`${GRAPHQL_DATA_DIR}/${version}/upcoming-changes.json`, `${GRAPHQL_DATA_DIR}/${version}/upcoming-changes.json`,
) as Record<string, Array<any>> // GraphQL change object structure is dynamic ) as Record<string, Array<any>> // GraphQL change object structure is dynamic
for (const changes of Object.values(upcomingChanges)) { for (const changes of Object.values(upcomingChanges)) {
// each object value is an array of changes // each object value is an array of changes
changes.forEach((changeObj) => { for (const changeObj of changes) {
const isValid = upcomingChangesValidate(changeObj) const isValid = upcomingChangesValidate(changeObj)
let errors: string | undefined let errors: string | undefined
@@ -85,8 +85,8 @@ describe('graphql json files', () => {
} }
expect(isValid, errors).toBe(true) expect(isValid, errors).toBe(true)
}) }
} }
}) }
}) })
}) })
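Loops that register tests, as in the version loop above, behave identically in either style: both run synchronously at collection time, so each iteration still registers its own test case. A minimal sketch, assuming vitest as in the file above:

```ts
import { test, expect } from 'vitest'

// One test is registered per version at collection time,
// exactly as the forEach version did.
const graphqlVersions = ['fpt', 'ghes-3.12']
for (const version of graphqlVersions) {
  test(`${version} schema validation`, () => {
    expect(version.length).toBeGreaterThan(0)
  })
}
```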

View File

@@ -133,7 +133,7 @@ function RestNavListItem({ category }: { category: ProductTreeNode }) {
if (nonAutomatedRestPaths.every((item: string) => !asPath.includes(item))) { if (nonAutomatedRestPaths.every((item: string) => !asPath.includes(item))) {
const observer = new IntersectionObserver( const observer = new IntersectionObserver(
(entries) => { (entries) => {
entries.forEach((entry) => { for (const entry of entries) {
if (entry.target.id) { if (entry.target.id) {
const anchor = `#${entry.target.id.split('--')[0]}` const anchor = `#${entry.target.id.split('--')[0]}`
if (entry.isIntersecting === true) setVisibleAnchor(anchor) if (entry.isIntersecting === true) setVisibleAnchor(anchor)
@@ -142,7 +142,7 @@ function RestNavListItem({ category }: { category: ProductTreeNode }) {
} else { } else {
setVisibleAnchor('') setVisibleAnchor('')
} }
}) }
}, },
{ rootMargin: '0px 0px -85% 0px' }, { rootMargin: '0px 0px -85% 0px' },
) )
@@ -150,9 +150,9 @@ function RestNavListItem({ category }: { category: ProductTreeNode }) {
// we can remove the h2 here // we can remove the h2 here
const headingsList = Array.from(document.querySelectorAll('h2, h3')) const headingsList = Array.from(document.querySelectorAll('h2, h3'))
headingsList.forEach((heading) => { for (const heading of headingsList) {
observer.observe(heading) observer.observe(heading)
}) }
return () => { return () => {
observer.disconnect() observer.disconnect()

View File

@@ -17,18 +17,18 @@ describe('octicons reference', () => {
}) })
test('all octicons are strings', () => { test('all octicons are strings', () => {
VALID_OCTICONS.forEach((octicon) => { for (const octicon of VALID_OCTICONS) {
expect(typeof octicon).toBe('string') expect(typeof octicon).toBe('string')
}) }
}) })
}) })
describe('OCTICON_COMPONENTS', () => { describe('OCTICON_COMPONENTS', () => {
test('has components for all valid octicons', () => { test('has components for all valid octicons', () => {
VALID_OCTICONS.forEach((octicon) => { for (const octicon of VALID_OCTICONS) {
expect(OCTICON_COMPONENTS[octicon]).toBeDefined() expect(OCTICON_COMPONENTS[octicon]).toBeDefined()
expect(typeof OCTICON_COMPONENTS[octicon]).toBe('object') expect(typeof OCTICON_COMPONENTS[octicon]).toBe('object')
}) }
}) })
test('maps specific octicons to correct components', () => { test('maps specific octicons to correct components', () => {
@@ -90,9 +90,9 @@ describe('octicons reference', () => {
// Test a few key octicons to verify the type works correctly // Test a few key octicons to verify the type works correctly
const testOcticons: ValidOcticon[] = ['bug', 'rocket', 'copilot'] const testOcticons: ValidOcticon[] = ['bug', 'rocket', 'copilot']
testOcticons.forEach((octicon) => { for (const octicon of testOcticons) {
expect(VALID_OCTICONS.includes(octicon)).toBe(true) expect(VALID_OCTICONS.includes(octicon)).toBe(true)
}) }
}) })
}) })
@@ -101,9 +101,9 @@ describe('octicons reference', () => {
const componentKeys = Object.keys(OCTICON_COMPONENTS) const componentKeys = Object.keys(OCTICON_COMPONENTS)
const validOcticonsSet = new Set(VALID_OCTICONS) const validOcticonsSet = new Set(VALID_OCTICONS)
componentKeys.forEach((key) => { for (const key of componentKeys) {
expect(validOcticonsSet.has(key as ValidOcticon)).toBe(true) expect(validOcticonsSet.has(key as ValidOcticon)).toBe(true)
}) }
expect(componentKeys).toHaveLength(VALID_OCTICONS.length) expect(componentKeys).toHaveLength(VALID_OCTICONS.length)
}) })

View File

@@ -369,12 +369,14 @@ export function correctTranslatedContentStrings(
} }
if (content.includes('{{%')) { if (content.includes('{{%')) {
content.split('\n').forEach((line, i) => { const lines = content.split('\n')
for (let i = 0; i < lines.length; i++) {
const line = lines[i]
if (line.includes('{{%') && !line.includes('{{{% endraw')) { if (line.includes('{{%') && !line.includes('{{{% endraw')) {
console.log(context.code, 'context.relativePath', context.relativePath) console.log(context.code, 'context.relativePath', context.relativePath)
console.log(i, line) console.log(i, line)
} }
}) }
} }
return content return content

View File

@@ -56,9 +56,10 @@ export default function getEnglishHeadings(
// return a map from translation:English // return a map from translation:English
const headingMap: Record<string, string> = {} const headingMap: Record<string, string> = {}
translatedHeadings.forEach((k: string, i: number) => { for (let i = 0; i < translatedHeadings.length; i++) {
const k = translatedHeadings[i]
headingMap[k] = englishHeadings[i] headingMap[k] = englishHeadings[i]
}) }
return headingMap return headingMap
} }

View File

@@ -60,26 +60,26 @@ const languages: Languages = { ...allLanguagesWithDirs }
if (TRANSLATIONS_FIXTURE_ROOT) { if (TRANSLATIONS_FIXTURE_ROOT) {
// Keep all languages that have a directory in the fixture root. // Keep all languages that have a directory in the fixture root.
Object.entries(languages).forEach(([code, { dir }]) => { for (const [code, { dir }] of Object.entries(languages)) {
if (code !== 'en' && !fs.existsSync(dir)) { if (code !== 'en' && !fs.existsSync(dir)) {
delete languages[code] delete languages[code]
} }
}) }
} else if (process.env.ENABLED_LANGUAGES) { } else if (process.env.ENABLED_LANGUAGES) {
if (process.env.ENABLED_LANGUAGES.toLowerCase() !== 'all') { if (process.env.ENABLED_LANGUAGES.toLowerCase() !== 'all') {
Object.keys(languages).forEach((code) => { for (const code of Object.keys(languages)) {
if (!process.env.ENABLED_LANGUAGES!.includes(code)) { if (!process.env.ENABLED_LANGUAGES!.includes(code)) {
delete languages[code] delete languages[code]
} }
}) }
// This makes the translation health report not valid JSON // This makes the translation health report not valid JSON
// console.log(`ENABLED_LANGUAGES: ${process.env.ENABLED_LANGUAGES}`) // console.log(`ENABLED_LANGUAGES: ${process.env.ENABLED_LANGUAGES}`)
} }
} else if (process.env.NODE_ENV === 'test') { } else if (process.env.NODE_ENV === 'test') {
// Unless explicitly set, when running tests default to just English // Unless explicitly set, when running tests default to just English
Object.keys(languages).forEach((code) => { for (const code of Object.keys(languages)) {
if (code !== 'en') delete languages[code] if (code !== 'en') delete languages[code]
}) }
} }
export const languageKeys: string[] = Object.keys(languages) export const languageKeys: string[] = Object.keys(languages)
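Deleting keys while looping, as the language-filtering hunks above do, is safe in either style because `Object.keys` returns a snapshot array rather than a live view of the object. Sketch:

```ts
const languages: Record<string, { name: string }> = {
  en: { name: 'English' },
  ja: { name: 'Japanese' },
}

// Object.keys() snapshots the keys up front, so deletion inside
// the loop cannot skip or revisit entries.
for (const code of Object.keys(languages)) {
  if (code !== 'en') delete languages[code]
}
// languages is now { en: { name: 'English' } }
```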

View File

@@ -134,24 +134,28 @@ function run(languageCode: string, site: Site, englishReusables: Reusables) {
const sumTotal = flat.reduce((acc, [, count]) => acc + count, 0) const sumTotal = flat.reduce((acc, [, count]) => acc + count, 0)
console.log('\nMost common errors') console.log('\nMost common errors')
flat.forEach(([error, count], i) => { for (let i = 0; i < flat.length; i++) {
const [error, count] = flat[i]
console.log(`${i + 1}.`.padEnd(3), error.padEnd(PADDING), count) console.log(`${i + 1}.`.padEnd(3), error.padEnd(PADDING), count)
}) }
console.log(`${'TOTAL:'.padEnd(3 + 1 + PADDING)}`, sumTotal) console.log(`${'TOTAL:'.padEnd(3 + 1 + PADDING)}`, sumTotal)
if (sumTotal) { if (sumTotal) {
const whereFlat = Array.from(wheres.entries()).sort((a, b) => b[1] - a[1]) const whereFlat = Array.from(wheres.entries()).sort((a, b) => b[1] - a[1])
console.log('\nMost common places') console.log('\nMost common places')
whereFlat.forEach(([error, count], i) => { for (let i = 0; i < whereFlat.length; i++) {
const [error, count] = whereFlat[i]
console.log(`${i + 1}.`.padEnd(3), error.padEnd(PADDING), count) console.log(`${i + 1}.`.padEnd(3), error.padEnd(PADDING), count)
}) }
const illegalTagsFlat = Array.from(illegalTags.entries()).sort((a, b) => b[1] - a[1]) const illegalTagsFlat = Array.from(illegalTags.entries()).sort((a, b) => b[1] - a[1])
if (illegalTagsFlat.reduce((acc, [, count]) => acc + count, 0)) { if (illegalTagsFlat.reduce((acc, [, count]) => acc + count, 0)) {
console.log('\nMost common illegal tags', illegalTagsFlat.length > 10 ? ' (Top 10)' : '') console.log('\nMost common illegal tags', illegalTagsFlat.length > 10 ? ' (Top 10)' : '')
illegalTagsFlat.slice(0, 10).forEach(([error, count], i) => { const topIllegalTags = illegalTagsFlat.slice(0, 10)
for (let i = 0; i < topIllegalTags.length; i++) {
const [error, count] = topIllegalTags[i]
console.log(`${i + 1}.`.padEnd(3), error.padEnd(PADDING), count) console.log(`${i + 1}.`.padEnd(3), error.padEnd(PADDING), count)
}) }
} }
} }
console.log('\n') console.log('\n')

View File

@@ -122,18 +122,18 @@ async function main(opts: MainOptions, args: string[]) {
const helpIndices = getIndicesOf('GitHub.help_url', contents) const helpIndices = getIndicesOf('GitHub.help_url', contents)
helpIndices.push(...getIndicesOf('GitHub.developer_help_url', contents)) helpIndices.push(...getIndicesOf('GitHub.developer_help_url', contents))
if (docsIndices.length > 0) { if (docsIndices.length > 0) {
docsIndices.forEach((numIndex) => { for (const numIndex of docsIndices) {
// Assuming we don't have links close to 500 characters long // Assuming we don't have links close to 500 characters long
const docsLink = contents.substring(numIndex, numIndex + 500).match(urlRegEx) const docsLink = contents.substring(numIndex, numIndex + 500).match(urlRegEx)
if (!docsLink) return if (!docsLink) continue
const linkURL = new URL(docsLink[0].toString().replace(/[^a-zA-Z0-9]*$|\\n$/g, '')) const linkURL = new URL(docsLink[0].toString().replace(/[^a-zA-Z0-9]*$|\\n$/g, ''))
const linkPath = linkURL.pathname + linkURL.hash const linkPath = linkURL.pathname + linkURL.hash
docsLinksFiles.push({ linkPath, file }) docsLinksFiles.push({ linkPath, file })
}) }
} }
if (helpIndices.length > 0) { if (helpIndices.length > 0) {
helpIndices.forEach((numIndex) => { for (const numIndex of helpIndices) {
// There are certain links like #{GitHub.help_url}#{learn_more_path} and #{GitHub.developer_help_url}#{learn_more_path} that we should skip // There are certain links like #{GitHub.help_url}#{learn_more_path} and #{GitHub.developer_help_url}#{learn_more_path} that we should skip
if ( if (
(contents.substring(numIndex, numIndex + 11) === 'GitHub.help' && (contents.substring(numIndex, numIndex + 11) === 'GitHub.help' &&
@@ -170,7 +170,7 @@ async function main(opts: MainOptions, args: string[]) {
docsLinksFiles.push({ linkPath, file }) docsLinksFiles.push({ linkPath, file })
} }
}) }
} }
} }
} }

View File

@@ -18,11 +18,11 @@ const STATIC_PREFIXES = {
public: path.resolve(path.join('src', 'graphql', 'data')), public: path.resolve(path.join('src', 'graphql', 'data')),
} }
// Sanity check that these are valid paths // Sanity check that these are valid paths
Object.entries(STATIC_PREFIXES).forEach(([key, value]) => { for (const [key, value] of Object.entries(STATIC_PREFIXES)) {
if (!fs.existsSync(value)) { if (!fs.existsSync(value)) {
throw new Error(`Can't find static prefix (${key}): ${value}`) throw new Error(`Can't find static prefix (${key}): ${value}`)
} }
}) }
program program
.description('Analyze all checked content files, render them, and check for flaws.') .description('Analyze all checked content files, render them, and check for flaws.')

View File

@@ -87,11 +87,11 @@ const STATIC_PREFIXES: Record<string, string> = {
public: path.resolve(path.join('src', 'graphql', 'data')), public: path.resolve(path.join('src', 'graphql', 'data')),
} }
// Sanity check that these are valid paths // Sanity check that these are valid paths
Object.entries(STATIC_PREFIXES).forEach(([key, value]) => { for (const [key, value] of Object.entries(STATIC_PREFIXES)) {
if (!fs.existsSync(value)) { if (!fs.existsSync(value)) {
throw new Error(`Can't find static prefix (${key}): ${value}`) throw new Error(`Can't find static prefix (${key}): ${value}`)
} }
}) }
// By default, we don't cache external link checks to disk. // By default, we don't cache external link checks to disk.
// By setting this env var to something >0, it enables the disk-based // By setting this env var to something >0, it enables the disk-based

View File

@@ -228,7 +228,8 @@ function printObjectDifference(
const combinedKey = `${parentKey}.${key}` const combinedKey = `${parentKey}.${key}`
if (Array.isArray(value) && !equalArray(value, objTo[key])) { if (Array.isArray(value) && !equalArray(value, objTo[key])) {
const printedKeys = new Set() const printedKeys = new Set()
value.forEach((entry, i) => { for (let i = 0; i < value.length; i++) {
const entry = value[i]
// If it was an array of objects, we need to go deeper! // If it was an array of objects, we need to go deeper!
if (isObject(entry)) { if (isObject(entry)) {
printObjectDifference(entry, objTo[key][i], rawContent, combinedKey) printObjectDifference(entry, objTo[key][i], rawContent, combinedKey)
@@ -243,10 +244,9 @@ function printObjectDifference(
const needle = new RegExp(`- ${entry}\\b`) const needle = new RegExp(`- ${entry}\\b`)
const index = rawContent.split(/\n/g).findIndex((line) => needle.test(line)) const index = rawContent.split(/\n/g).findIndex((line) => needle.test(line))
console.log(' ', chalk.dim(`line ${(index && index + 1) || 'unknown'}`)) console.log(' ', chalk.dim(`line ${(index && index + 1) || 'unknown'}`))
console.log('')
} }
} }
}) }
} else if (typeof value === 'object' && value !== null) { } else if (typeof value === 'object' && value !== null) {
printObjectDifference(value, objTo[key], rawContent, combinedKey) printObjectDifference(value, objTo[key], rawContent, combinedKey)
} }

View File

@@ -33,9 +33,9 @@ interface LoggerMethod {
(message: string, ...args: (string | number | boolean | Error | IncludeContext)[]): void (message: string, ...args: (string | number | boolean | Error | IncludeContext)[]): void
} }
/* /*
Call this function with `import.meta.url` as the argument to create a logger for a specific file. Call this function with `import.meta.url` as the argument to create a logger for a specific file.
e.g. `const logger = createLogger(import.meta.url)` e.g. `const logger = createLogger(import.meta.url)`
Logs will be output to the console in development, and in `logfmt` format to stdout in production. Logs will be output to the console in development, and in `logfmt` format to stdout in production.
@@ -111,9 +111,10 @@ export function createLogger(filePath: string) {
finalMessage = `${finalMessage}: ${errorObjects[0].message}` finalMessage = `${finalMessage}: ${errorObjects[0].message}`
} else { } else {
// Multiple errors - use indexed keys and append all error messages // Multiple errors - use indexed keys and append all error messages
errorObjects.forEach((error, index) => { for (let index = 0; index < errorObjects.length; index++) {
const error = errorObjects[index]
includeContext[`error_${index + 1}`] = error includeContext[`error_${index + 1}`] = error
}) }
const errorMessages = errorObjects.map((err) => err.message).join(', ') const errorMessages = errorObjects.map((err) => err.message).join(', ')
finalMessage = `${finalMessage}: ${errorMessages}` finalMessage = `${finalMessage}: ${errorMessages}`
} }

View File

@@ -1,4 +1,4 @@
/* /*
Flattens a JSON object and converts it to a logfmt string Flattens a JSON object and converts it to a logfmt string
Nested objects are flattened with a dot separator, e.g. requestContext.path=/en Nested objects are flattened with a dot separator, e.g. requestContext.path=/en
This is because Splunk doesn't support nested JSON objects. This is because Splunk doesn't support nested JSON objects.
@@ -61,7 +61,7 @@ export function toLogfmt(jsonString: Record<string, any>): string {
result: Record<string, any> = {}, result: Record<string, any> = {},
seen: WeakSet<object> = new WeakSet(), seen: WeakSet<object> = new WeakSet(),
): Record<string, any> => { ): Record<string, any> => {
Object.keys(obj).forEach((key) => { for (const key of Object.keys(obj)) {
const newKey = parentKey ? `${parentKey}.${key}` : key const newKey = parentKey ? `${parentKey}.${key}` : key
const value = obj[key] const value = obj[key]
@@ -69,19 +69,19 @@ export function toLogfmt(jsonString: Record<string, any>): string {
// Handle circular references // Handle circular references
if (seen.has(value)) { if (seen.has(value)) {
result[newKey] = '[Circular]' result[newKey] = '[Circular]'
return continue
} }
// Handle Date objects specially // Handle Date objects specially
if (value instanceof Date) { if (value instanceof Date) {
result[newKey] = value.toISOString() result[newKey] = value.toISOString()
return continue
} }
// Handle arrays // Handle arrays
if (Array.isArray(value)) { if (Array.isArray(value)) {
result[newKey] = value.join(',') result[newKey] = value.join(',')
return continue
} }
// Handle other objects - only flatten if not empty // Handle other objects - only flatten if not empty
@@ -96,7 +96,7 @@ export function toLogfmt(jsonString: Record<string, any>): string {
result[newKey] = result[newKey] =
value === undefined || (typeof value === 'string' && value === '') ? null : value value === undefined || (typeof value === 'string' && value === '') ? null : value
} }
}) }
return result return result
} }

View File

@@ -125,11 +125,11 @@ export async function getLocalizedGroupNames(lang: string): Promise<{ [key: stri
export function createOcticonToNameMap(childGroups: ProductGroupData[]): { [key: string]: string } { export function createOcticonToNameMap(childGroups: ProductGroupData[]): { [key: string]: string } {
const octiconToName: { [key: string]: string } = {} const octiconToName: { [key: string]: string } = {}
childGroups.forEach((group: ProductGroupData) => { for (const group of childGroups) {
if (group.octicon && group.name) { if (group.octicon && group.name) {
octiconToName[group.octicon] = group.name octiconToName[group.octicon] = group.name
} }
}) }
return octiconToName return octiconToName
} }
@@ -140,11 +140,11 @@ export function mapEnglishToLocalizedNames(
): { [key: string]: string } { ): { [key: string]: string } {
const nameMap: { [key: string]: string } = {} const nameMap: { [key: string]: string } = {}
englishGroups.forEach((englishGroup: ProductGroupData) => { for (const englishGroup of englishGroups) {
if (englishGroup.octicon && localizedByOcticon[englishGroup.octicon]) { if (englishGroup.octicon && localizedByOcticon[englishGroup.octicon]) {
nameMap[englishGroup.name] = localizedByOcticon[englishGroup.octicon] nameMap[englishGroup.name] = localizedByOcticon[englishGroup.octicon]
} }
}) }
return nameMap return nameMap
} }

View File

@@ -5,8 +5,8 @@ const productNames: ProductNames = {
dotcom: 'GitHub.com', dotcom: 'GitHub.com',
} }
enterpriseServerReleases.all.forEach((version) => { for (const version of enterpriseServerReleases.all) {
productNames[version] = `Enterprise Server ${version}` productNames[version] = `Enterprise Server ${version}`
}) }
export default productNames export default productNames

View File

@@ -14,7 +14,7 @@ describe('products module', () => {
}) })
test('every product is valid', () => { test('every product is valid', () => {
Object.values(productMap).forEach((product) => { for (const product of Object.values(productMap)) {
const isValid = validate(product) const isValid = validate(product)
let errors: string | undefined let errors: string | undefined
@@ -22,6 +22,6 @@ describe('products module', () => {
errors = formatAjvErrors(validate.errors) errors = formatAjvErrors(validate.errors)
} }
expect(isValid, errors).toBe(true) expect(isValid, errors).toBe(true)
}) }
}) })
}) })

View File

@@ -25,7 +25,7 @@ export default function permalinkRedirects(
   // For every "old" path in a content file's redirect_from frontmatter, also add that path to
   // the redirects object as a key, where the value is the content file's permalink.
-  redirectFrom.forEach((frontmatterOldPath) => {
+  for (let frontmatterOldPath of redirectFrom) {
     if (!frontmatterOldPath.startsWith('/')) {
       throw new Error(
         `'${frontmatterOldPath}' is not a valid redirect_from frontmatter value because it doesn't start with a /`,
@@ -40,7 +40,8 @@ export default function permalinkRedirects(
       .replace('/admin/guides/', '/admin/')
       .replace(/^\/enterprise\/admin\//, '/admin/')
-    permalinks.forEach((permalink, index) => {
+    for (let index = 0; index < permalinks.length; index++) {
+      const permalink = permalinks[index]
       // For the first supported permalink (the order is determined by lib/all-versions),
       // put an entry into `redirects` without any version prefix.
       if (index === 0) {
@@ -49,8 +50,8 @@ export default function permalinkRedirects(
       // For every permalink, put an entry into `redirects` with the version prefix.
       redirects[`/${permalink.pageVersion}${frontmatterOldPath}`] = permalink.hrefWithoutLanguage
-    })
-  })
+    }
+  }
   return redirects
 }
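When the `forEach` callback uses the index parameter, as in the hunk above, the replacement loop has to carry the index itself. A minimal sketch of two equivalent conversions, using a hypothetical `items` array:

```ts
const items = ['first', 'second', 'third']

// Counted loop: the index stays explicit, so positional lookups still work.
for (let index = 0; index < items.length; index++) {
  const item = items[index]
  console.log(index, item)
}

// Alternative: Array.prototype.entries() yields [index, value] pairs.
for (const [index, item] of items.entries()) {
  console.log(index, item)
}
```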

View File

@@ -22,9 +22,9 @@ export async function precompileRedirects(pageList: Page[]): Promise<Redirects>
   // CURRENT PAGES PERMALINKS AND FRONTMATTER
   // create backwards-compatible old paths for page permalinks and frontmatter redirects
-  pageList
-    .filter((page) => page.languageCode === 'en')
-    .forEach((page) => Object.assign(allRedirects, page.buildRedirects()))
+  for (const page of pageList.filter((xpage) => xpage.languageCode === 'en')) {
+    Object.assign(allRedirects, page.buildRedirects())
+  }
   // NOTE: Exception redirects **MUST COME AFTER** pageList redirects above in order
   // to properly override them. Exception redirects are unicorn one-offs that are not
@@ -46,7 +46,7 @@ export async function precompileRedirects(pageList: Page[]): Promise<Redirects>
   const exceptions = getExceptionRedirects(EXCEPTIONS_FILE) as Redirects
   Object.assign(allRedirects, exceptions)
-  Object.entries(allRedirects).forEach(([fromURI, toURI]) => {
+  for (const [fromURI, toURI] of Object.entries(allRedirects)) {
     // If the destination URL has a hardcoded `enterprise-server@latest` in
     // it we need to rewrite that now.
     // We never want to redirect to that as the final URL (in the 301 response)
@@ -60,7 +60,7 @@ export async function precompileRedirects(pageList: Page[]): Promise<Redirects>
       `/enterprise-server@${latest}`,
     )
   }
-  })
+  }
   return allRedirects
 }

View File

@@ -58,17 +58,17 @@ describe('versioned redirects', () => {
     expect(redirectKeys.length).toBeGreaterThan(0)
     // Verify all old paths are properly formatted
-    redirectKeys.forEach((oldPath) => {
+    for (const oldPath of redirectKeys) {
       expect(oldPath).toMatch(/^\/[a-z0-9-/]+$/)
       expect(oldPath).not.toMatch(/^\/en\//)
-    })
+    }
     // Verify all new paths have proper versioning
-    Object.values(versionlessRedirects).forEach((newPath) => {
+    for (const newPath of Object.values(versionlessRedirects)) {
       expect(newPath).toMatch(
         /^\/(enterprise-cloud@latest|enterprise-server@latest|admin|github|articles|billing|code-security|actions|packages|copilot|rest|webhooks|developers)/,
       )
-    })
+    }
   })
   test('enterprise-server@latest paths are properly transformed', () => {
@@ -76,7 +76,7 @@ describe('versioned redirects', () => {
       newPath.includes('/enterprise-server@latest'),
     )
-    enterpriseServerPaths.forEach(([, newPath]) => {
+    for (const [, newPath] of enterpriseServerPaths) {
       const transformedPath = `/en${newPath.replace(
         '/enterprise-server@latest',
         `/enterprise-server@${latest}`,
@@ -85,6 +85,6 @@ describe('versioned redirects', () => {
       expect(transformedPath).toContain(`/enterprise-server@${latest}`)
       expect(transformedPath).not.toContain('/enterprise-server@latest')
       expect(transformedPath).toMatch(/^\/en\//)
-    })
+    }
   })
 })

View File

@@ -88,11 +88,11 @@ export default async function ghesReleaseNotesContext(
   req.context.latestRelease = latestStable
   // Add convenience props for "Supported releases" section on GHES Admin landing page (NOT release notes).
-  req.context.ghesReleases.forEach((release) => {
+  for (const release of req.context.ghesReleases) {
     release.firstPreviousRelease = all[all.findIndex((v) => v === release.version) + 1]
     release.secondPreviousRelease =
       all[all.findIndex((v) => v === release.firstPreviousRelease) + 1]
-  })
+  }
   return next()
 }

View File

@@ -42,7 +42,7 @@ describe('lint enterprise release notes', () => {
       for (const key in sections) {
         const section = sections[key]
         const label = `sections.${key}`
-        section.forEach((part) => {
+        for (const part of section) {
           if (Array.isArray(part)) {
             toLint = { ...toLint, ...{ [label]: section.join('\n') } }
           } else {
@@ -53,7 +53,7 @@ describe('lint enterprise release notes', () => {
             }
           }
         }
-        })
+        }
       }
   // Create context with site data for rendering liquid variables

View File

@@ -25,14 +25,14 @@ export const RestReferencePage = ({ restOperations }: StructuredContentT) => {
   useEffect(() => {
     const codeBlocks = document.querySelectorAll<HTMLPreElement>('pre')
-    codeBlocks.forEach((codeBlock) => {
+    for (const codeBlock of codeBlocks) {
       if (
         codeBlock.scrollWidth > codeBlock.clientWidth ||
         codeBlock.scrollHeight > codeBlock.clientHeight
       ) {
         codeBlock.setAttribute('tabindex', '0')
       }
-    })
+    }
   }, [])
   return (
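One detail worth noting in this hunk: `querySelectorAll` returns a `NodeList`, not an array. The conversion still works because `NodeList` is iterable in modern browsers (and with an ES2015+ TypeScript target). A minimal sketch of the pattern:

```ts
// NodeList is iterable, so for...of needs no Array.from() wrapper.
const blocks = document.querySelectorAll<HTMLPreElement>('pre')
for (const block of blocks) {
  // If the content overflows its box, make the block keyboard-focusable.
  if (block.scrollWidth > block.clientWidth) {
    block.setAttribute('tabindex', '0')
  }
}
```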

View File

@@ -101,10 +101,10 @@ export function getShellExample(
   const { bodyParameters } = codeSample.request
   if (bodyParameters && typeof bodyParameters === 'object' && !Array.isArray(bodyParameters)) {
     const paramNames = Object.keys(bodyParameters)
-    paramNames.forEach((elem) => {
+    for (const elem of paramNames) {
       const escapedValue = escapeShellValue(String(bodyParameters[elem]))
       requestBodyParams = `${requestBodyParams} ${CURL_CONTENT_TYPE_MAPPING[contentType]} '${elem}=${escapedValue}'`
-    })
+    }
   } else {
     const escapedValue = escapeShellValue(String(bodyParameters))
     requestBodyParams = `${CURL_CONTENT_TYPE_MAPPING[contentType]} "${escapedValue}"`

View File

@@ -59,22 +59,22 @@ const restOperationData = new Map<
 >()
 const restOperations = new Map<string, Map<string, RestOperationData>>()
-Object.keys(languages).forEach((language: string) => {
+for (const language of Object.keys(languages)) {
   restOperationData.set(language, new Map())
-  Object.keys(allVersions).forEach((version: string) => {
+  for (const version of Object.keys(allVersions)) {
     // setting to undefined will allow us to perform checks
     // more easily later on
     restOperationData.get(language)!.set(version, new Map())
     if (allVersions[version].apiVersions && allVersions[version].apiVersions.length > 0) {
-      allVersions[version].apiVersions.forEach((date: string) => {
+      for (const date of allVersions[version].apiVersions) {
         restOperationData.get(language)!.get(version)!.set(date, new Map())
-      })
+      }
     } else {
       // Products that are not been calendar date versioned
       restOperationData.get(language)!.get(version)!.set(NOT_API_VERSIONED, new Map())
     }
-  })
-})
+  }
+}
 export const categoriesWithoutSubcategories: string[] = fs
   .readdirSync(REST_CONTENT_DIR)
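The hunk above builds a three-level `Map` keyed by language, docs version, and API calendar version. A minimal standalone sketch of that shape, with hypothetical keys (the real maps hold operation data rather than `unknown`):

```ts
const data = new Map<string, Map<string, Map<string, unknown>>>()
const languages = ['en', 'ja'] // hypothetical keys
const versions = ['free-pro-team@latest', 'enterprise-server@3.15']

for (const language of languages) {
  data.set(language, new Map())
  for (const version of versions) {
    // The non-null assertion is safe: the outer key was set one line above.
    data.get(language)!.set(version, new Map())
  }
}
```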

View File

@@ -144,7 +144,7 @@ export const getServerSideProps: GetServerSideProps<Props> = async (context) =>
     req.context,
   )) as MinitocItemsT
-  miniTocItems.restOperationsMiniTocItems.forEach((operationMinitoc) => {
+  for (const operationMinitoc of miniTocItems.restOperationsMiniTocItems) {
     const { title, href: miniTocAnchor } = operationMinitoc.contents
     const fullPath = `/${context.locale}${versionPathSegment}rest/${context.params?.category}/${subCat}${miniTocAnchor}`
@@ -152,7 +152,7 @@ export const getServerSideProps: GetServerSideProps<Props> = async (context) =>
       fullPath,
       title,
     })
-  })
+  }
   // TocLanding expects a collection of objects that looks like this:
   //

View File

@@ -37,12 +37,12 @@ export async function getDiffOpenAPIContentRest(): Promise<ErrorMessages> {
   for (const schemaName in differences) {
     errorMessages[schemaName] = {}
-    differences[schemaName].forEach((category) => {
+    for (const category of differences[schemaName]) {
       errorMessages[schemaName][category] = {
         contentDir: checkContentDir[schemaName][category],
         openAPI: openAPISchemaCheck[schemaName][category],
       }
-    })
+    }
   }
 }
@@ -57,23 +57,23 @@ async function createOpenAPISchemasCheck(): Promise<CheckObject> {
     // Allow the most recent deprecation to exist on disk until fully deprecated
     .filter((dir) => !dir.includes(deprecated[0]))
-  restDirectory.forEach((dir) => {
+  for (const dir of restDirectory) {
     const filename = path.join(REST_DATA_DIR, dir, REST_SCHEMA_FILENAME)
     const fileSchema = JSON.parse(fs.readFileSync(filename, 'utf8'))
     const categories = Object.keys(fileSchema).sort()
     const version = getDocsVersion(dir)
-    categories.forEach((category) => {
+    for (const category of categories) {
       const subcategories = Object.keys(fileSchema[category]) as string[]
       if (isApiVersioned(version)) {
-        getOnlyApiVersions(version).forEach(
-          (apiVersion) => (openAPICheck[apiVersion][category] = subcategories.sort()),
-        )
+        for (const apiVersion of getOnlyApiVersions(version)) {
+          openAPICheck[apiVersion][category] = subcategories.sort()
+        }
       } else {
         openAPICheck[version][category] = subcategories.sort()
       }
-    })
-  })
+    }
+  }
   return openAPICheck
 }
@@ -97,14 +97,14 @@ async function createCheckContentDirectory(contentFiles: string[]): Promise<Chec
       : version
   })
-  allCompleteVersions.forEach((version) => {
+  for (const version of allCompleteVersions) {
     if (!checkContent[version][category]) {
       checkContent[version][category] = [subCategory]
     } else {
       checkContent[version][category].push(subCategory)
     }
     checkContent[version][category].sort()
-  })
+  }
   }
   return checkContent
@@ -122,13 +122,15 @@ function getOnlyApiVersions(version: string): string[] {
 function createCheckObj(): CheckObject {
   const versions: CheckObject = {}
-  Object.keys(allVersions).forEach((version) => {
+  for (const version of Object.keys(allVersions)) {
     if (isApiVersioned(version)) {
-      getOnlyApiVersions(version).forEach((apiVersion) => (versions[apiVersion] = {}))
+      for (const apiVersion of getOnlyApiVersions(version)) {
+        versions[apiVersion] = {}
+      }
     } else {
       versions[`${allVersions[version].version}`] = {}
     }
-  })
+  }
   return versions
 }

View File

@@ -62,9 +62,9 @@ export default async function getCodeSamples(operation: Operation): Promise<Merg
   // has the same description, add a number to the example
   if (mergedExamples.length > 1) {
     const count: Record<string, number> = {}
-    mergedExamples.forEach((item) => {
+    for (const item of mergedExamples) {
       count[item.request.description] = (count[item.request.description] || 0) + 1
-    })
+    }
     const newMergedExamples = mergedExamples.map((example, i) => ({
       ...example,
@@ -204,7 +204,7 @@ export function getRequestExamples(operation: Operation): RequestExample[] {
   // Requests can have multiple content types each with their own set of
   // examples.
-  Object.keys(operation.requestBody.content).forEach((contentType) => {
+  for (const contentType of Object.keys(operation.requestBody.content)) {
     let examples: Record<string, any> = {}
     // This is a fallback to allow using the `example` property in
     // the schema. If we start to enforce using examples vs. example using
@@ -230,13 +230,13 @@ export function getRequestExamples(operation: Operation): RequestExample[] {
           parameters: parameterExamples.default,
         },
       })
-      return
+      continue
     }
     // There can be more than one example for a given content type. We need to
     // iterate over the keys of the examples to create individual
     // example objects
-    Object.keys(examples).forEach((key) => {
+    for (const key of Object.keys(examples)) {
       // A content type that includes `+json` is a custom media type
       // The default accept header is application/vnd.github.v3+json
       // Which would have a content type of `application/json`
@@ -255,8 +255,8 @@ export function getRequestExamples(operation: Operation): RequestExample[] {
         },
       }
       requestExamples.push(example)
-    })
-  })
+    }
+  }
   return requestExamples
 }
@@ -279,10 +279,10 @@ export function getRequestExamples(operation: Operation): RequestExample[] {
  */
 export function getResponseExamples(operation: Operation): ResponseExample[] {
   const responseExamples: ResponseExample[] = []
-  Object.keys(operation.responses).forEach((statusCode) => {
+  for (const statusCode of Object.keys(operation.responses)) {
     // We don't want to create examples for error codes
     // Error codes are displayed in the status table in the docs
-    if (parseInt(statusCode, 10) >= 400) return
+    if (parseInt(statusCode, 10) >= 400) continue
     const content = operation.responses[statusCode].content
@@ -298,12 +298,12 @@ export function getResponseExamples(operation: Operation): ResponseExample[] {
         },
       }
       responseExamples.push(example)
-      return
+      continue
     }
     // Responses can have multiple content types each with their own set of
     // examples.
-    Object.keys(content).forEach((contentType) => {
+    for (const contentType of Object.keys(content)) {
       let examples: Record<string, any> = {}
       // This is a fallback to allow using the `example` property in
       // the schema. If we start to enforce using examples vs. example using
@@ -333,18 +333,18 @@ export function getResponseExamples(operation: Operation): ResponseExample[] {
          },
        }
        responseExamples.push(example)
-        return
+        continue
      } else {
        // Example for this content type doesn't exist.
        // We could also check if there is a fully populated example
        // directly in the response schema examples properties.
-        return
+        continue
      }
      // There can be more than one example for a given content type. We need to
      // iterate over the keys of the examples to create individual
      // example objects
-      Object.keys(examples).forEach((key) => {
+      for (const key of Object.keys(examples)) {
        const example = {
          key,
          response: {
@@ -360,9 +360,9 @@ export function getResponseExamples(operation: Operation): ResponseExample[] {
          },
        }
        responseExamples.push(example)
-      })
-    })
-  })
+      }
+    }
+  }
   return responseExamples
 }
@@ -383,7 +383,7 @@ export function getParameterExamples(operation: Operation): Record<string, Recor
   }
   const parameters = operation.parameters.filter((param: any) => param.in === 'path')
   const parameterExamples: Record<string, Record<string, any>> = {}
-  parameters.forEach((parameter: any) => {
+  for (const parameter of parameters) {
     const examples = parameter.examples
     // If there are no examples, create an example from the uppercase parameter
     // name, so that it is more visible that the value is fake data
@@ -392,11 +392,11 @@ export function getParameterExamples(operation: Operation): Record<string, Recor
     if (!parameterExamples.default) parameterExamples.default = {}
       parameterExamples.default[parameter.name] = parameter.name.toUpperCase()
     } else {
-      Object.keys(examples).forEach((key) => {
+      for (const key of Object.keys(examples)) {
         if (!parameterExamples[key]) parameterExamples[key] = {}
         parameterExamples[key][parameter.name] = examples[key].value
-      })
+      }
     }
-  })
+  }
   return parameterExamples
 }
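The hunks in this file also show the one semantic trap in this refactor: inside a `forEach` callback, `return` only ends the current callback invocation, so its `for...of` equivalent is `continue`, not `return`. A minimal sketch with hypothetical status codes:

```ts
const statusCodes = ['200', '304', '404', '500']
const fromForEach: string[] = []
const fromForOf: string[] = []

// forEach: `return` exits only this callback; iteration proceeds.
statusCodes.forEach((code) => {
  if (parseInt(code, 10) >= 400) return
  fromForEach.push(code)
})

// for...of: the same early skip is spelled `continue`.
for (const code of statusCodes) {
  if (parseInt(code, 10) >= 400) continue
  fromForOf.push(code)
}

// Both arrays end up as ['200', '304'].
```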

View File

@@ -69,7 +69,7 @@ export async function getSchemas(
 export async function validateVersionsOptions(versions: string[]): Promise<void> {
   const schemas = await getSchemas()
   // Validate individual versions provided
-  versions.forEach((version) => {
+  for (const version of versions) {
     if (
       schemas.deprecated.includes(`${version}.deref.json`) ||
       schemas.unpublished.includes(`${version}.deref.json`)
@@ -79,5 +79,5 @@ export async function validateVersionsOptions(versions: string[]): Promise<void>
     } else if (!schemas.currentReleases.includes(`${version}.deref.json`)) {
       throw new Error(`🛑 The version (${version}) you specified is not valid.`)
     }
-  })
+  }
 }

View File

@@ -35,7 +35,7 @@ async function getClientSideRedirects(): Promise<RedirectMap> {
   )
   const operationRedirects: RedirectMap = {}
-  Object.values(operationUrls).forEach((value: OperationUrl) => {
+  for (const value of Object.values(operationUrls)) {
     const oldUrl = value.originalUrl.replace('/rest/reference', '/rest')
     const anchor = oldUrl.split('#')[1]
     const subcategory = value.subcategory
@@ -43,7 +43,7 @@ async function getClientSideRedirects(): Promise<RedirectMap> {
       ? `/rest/${value.category}/${subcategory}#${anchor}`
       : `/rest/${value.category}#${anchor}`
     operationRedirects[oldUrl] = redirectTo
-  })
+  }
   const redirects: RedirectMap = {
     ...operationRedirects,
     ...sectionUrls,

View File

@@ -45,9 +45,9 @@ export default class Operation {
     if (serverVariables) {
       // Template variables structure comes from OpenAPI server variables
       const templateVariables: Record<string, any> = {}
-      Object.keys(serverVariables).forEach(
-        (key) => (templateVariables[key] = serverVariables[key].default),
-      )
+      for (const key of Object.keys(serverVariables)) {
+        templateVariables[key] = serverVariables[key].default
+      }
       this.serverUrl = parseTemplate(this.serverUrl).expand(templateVariables)
     }

View File

@@ -80,7 +80,7 @@ async function formatRestData(operations: Operation[]): Promise<OperationsByCate
   const categories = [...new Set(operations.map((operation) => operation.category))].sort()
   const operationsByCategory: OperationsByCategory = {}
-  categories.forEach((category) => {
+  for (const category of categories) {
     operationsByCategory[category] = {}
     const categoryOperations = operations.filter((operation) => operation.category === category)
@@ -95,7 +95,7 @@ async function formatRestData(operations: Operation[]): Promise<OperationsByCate
       subcategories.unshift(firstItem)
     }
-    subcategories.forEach((subcategory) => {
+    for (const subcategory of subcategories) {
       operationsByCategory[category][subcategory] = []
       const subcategoryOperations = categoryOperations.filter(
@@ -103,8 +103,8 @@ async function formatRestData(operations: Operation[]): Promise<OperationsByCate
       )
       operationsByCategory[category][subcategory] = subcategoryOperations
-    })
-  })
+    }
+  }
   return operationsByCategory
 }

View File

@@ -98,10 +98,10 @@ async function getDataFrontmatter(
   for (const file of fileList) {
     const data = JSON.parse(await readFile(file, 'utf-8'))
     const docsVersionName = getDocsVersion(path.basename(path.dirname(file)))
-    Object.keys(data).forEach((category) => {
+    for (const category of Object.keys(data)) {
       // Used to automatically update Markdown files
       const subcategories = Object.keys(data[category])
-      subcategories.forEach((subcategory) => {
+      for (const subcategory of subcategories) {
        if (!restVersions[category]) {
          restVersions[category] = {}
        }
@@ -112,8 +112,8 @@ async function getDataFrontmatter(
        } else if (!restVersions[category][subcategory].versions.includes(docsVersionName)) {
          restVersions[category][subcategory].versions.push(docsVersionName)
        }
-      })
-    })
+      }
+    }
   }
   return restVersions
 }

View File

@@ -53,10 +53,11 @@ describe('rest example requests and responses', () => {
   test('check example number and status code appear', async () => {
     const mergedExamples = await getCodeSamples(operation)
     // example is any because getCodeSamples returns objects from untyped JavaScript module
-    mergedExamples.forEach((example: any, index: number) => {
+    for (let index = 0; index < mergedExamples.length; index++) {
+      const example: any = mergedExamples[index]
       expect(example.request.description).toBe(
         `Example ${index + 1}: Status Code ${example.response.statusCode}`,
       )
-    })
+    }
   })
 })

View File

@@ -74,25 +74,29 @@ describe('markdown for each rest version', () => {
     if (isApiVersioned(version)) {
       for (const apiVersion of allVersions[version].apiVersions) {
         const apiOperations = await getRest(version, apiVersion)
-        Object.keys(apiOperations).forEach((category) => allCategories.add(category))
+        for (const category of Object.keys(apiOperations)) {
+          allCategories.add(category)
+        }
         openApiSchema[version] = apiOperations
       }
     } else {
       const apiOperations = await getRest(version)
-      Object.keys(apiOperations).forEach((category) => allCategories.add(category))
+      for (const category of Object.keys(apiOperations)) {
+        allCategories.add(category)
+      }
       openApiSchema[version] = apiOperations
     }
   }
   // Read the versions from each index.md file to build a list of
   // applicable versions for each category
-  walk('content/rest', { includeBasePath: true, directories: false })
-    .filter((filename) => filename.includes('index.md'))
-    .forEach((file) => {
+  for (const file of walk('content/rest', { includeBasePath: true, directories: false }).filter(
+    (filename) => filename.includes('index.md'),
+  )) {
     const applicableVersions = getApplicableVersionFromFile(file)
     const { category } = getCategorySubcategory(file)
     categoryApplicableVersions[category] = applicableVersions
-    })
+  }
 })
 test('markdown file exists for every operationId prefix in all versions of the OpenAPI schema', async () => {
@@ -115,7 +119,7 @@ describe('markdown for each rest version', () => {
   test('category and subcategory exist in OpenAPI schema for every applicable version in markdown frontmatter', async () => {
     const automatedFiles = getAutomatedMarkdownFiles('content/rest')
-    automatedFiles.forEach((file) => {
+    for (const file of automatedFiles) {
       const applicableVersions = getApplicableVersionFromFile(file)
       const { category, subCategory } = getCategorySubcategory(file)
@@ -129,7 +133,7 @@ describe('markdown for each rest version', () => {
         `The versions that apply to category ${category} does not contain the ${version}, as is expected. Please check the versions for file ${file} or look at the index that governs that file (in its parent directory).`,
       ).toContain(version)
     }
-    })
+    }
   })
 })
@@ -155,15 +159,14 @@ describe('OpenAPI schema validation', () => {
   // even though the version is not yet supported in the docs)
   test('every OpenAPI version must have a schema file in the docs', async () => {
     const decoratedFilenames = walk(schemasPath).map((filename) => path.basename(filename, '.json'))
-    Object.values(allVersions)
-      .map((version) => version.openApiVersionName)
-      .forEach((openApiBaseName) => {
-        // Because the rest calendar dates now have latest, next, or calendar date attached to the name, we're
-        // now checking if the decorated file names now start with an openApiBaseName
-        expect(
-          decoratedFilenames.some((versionFile) => versionFile.startsWith(openApiBaseName)),
-        ).toBe(true)
-      })
+    const openApiBaseNames = Object.values(allVersions).map((version) => version.openApiVersionName)
+    for (const openApiBaseName of openApiBaseNames) {
+      // Because the rest calendar dates now have latest, next, or calendar date attached to the name, we're
+      // now checking if the decorated file names now start with an openApiBaseName
+      expect(
+        decoratedFilenames.some((versionFile) => versionFile.startsWith(openApiBaseName)),
+      ).toBe(true)
+    }
   })
   test('operations object structure organized by version, category, and subcategory', async () => {
@@ -214,10 +217,10 @@ describe('code examples are defined', () => {
     expect(isPlainObject(operation)).toBe(true)
     expect(operation.codeExamples).toBeDefined()
     // Code examples have dynamic structure from OpenAPI schema
-    operation.codeExamples.forEach((example: any) => {
+    for (const example of operation.codeExamples as any[]) {
       expect(isPlainObject(example.request)).toBe(true)
       expect(isPlainObject(example.response)).toBe(true)
-    })
+    }
   }
 })
 })
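Where the original chained `.map()` or `.filter()` into `.forEach()`, as in the first and third hunks above, the loop can either keep the chain in the loop header or hoist it into a named constant. A minimal sketch with hypothetical file names:

```ts
const filenames = ['README.md', 'rest/index.md', 'rest/actions/index.md']

// Inline: the filter expression stays in the loop header.
for (const file of filenames.filter((name) => name.includes('index.md'))) {
  console.log(file)
}

// Hoisted: a named intermediate keeps the header short and reusable.
const indexFiles = filenames.filter((name) => name.includes('index.md'))
for (const file of indexFiles) {
  console.log(file)
}
```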

View File

@@ -29,7 +29,7 @@ describe('REST references docs', () => {
   // These tests exists because of issue #1960
   test('rest subcategory with fpt in URL', async () => {
-    for (const category of [
+    const categories = [
       'migrations',
       'actions',
       'activity',
@@ -58,7 +58,8 @@ describe('REST references docs', () => {
       'search',
       'teams',
       'users',
-    ]) {
+    ]
+    for (const category of categories) {
       // Without language prefix
       {
         const res = await get(`/free-pro-team@latest/rest/reference/${category}`)

View File

@@ -93,9 +93,9 @@ export function useAISearchLocalStorageCache<T = any>(
     index.sort((a, b) => a.timestamp - b.timestamp)
     const excess = index.length - maxEntries
     const entriesToRemove = index.slice(0, excess)
-    entriesToRemove.forEach((entry) => {
+    for (const entry of entriesToRemove) {
      localStorage.removeItem(entry.key)
-    })
+    }
     index = index.slice(excess)
   }

View File

@@ -43,7 +43,7 @@ export function useMultiQueryParams() {
   const [asPathWithoutHash] = router.asPath.split('#')
   const [asPathRoot, asPathQuery = ''] = asPathWithoutHash.split('?')
   const searchParams = new URLSearchParams(asPathQuery)
-  initialKeys.forEach((key) => {
+  for (const key of initialKeys) {
     if (key === 'search-overlay-ask-ai') {
       if (newParams[key] === 'true') {
         searchParams.set(key, 'true')
@@ -57,7 +57,7 @@ export function useMultiQueryParams() {
       searchParams.delete(key)
     }
   }
-  })
+  }
   const paramsString = searchParams.toString() ? `?${searchParams.toString()}` : ''
   let newUrl = `${asPathRoot}${paramsString}`
   if (asPathRoot !== '/' && router.locale) {
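A minimal sketch of the `URLSearchParams` round-trip this hook relies on, with hypothetical keys:

```ts
const searchParams = new URLSearchParams('page=2&query=docs')
searchParams.set('search-overlay-ask-ai', 'true') // add or overwrite a key
searchParams.delete('page') // remove a key entirely

// toString() omits the leading '?', so it is added only when non-empty.
const paramsString = searchParams.toString() ? `?${searchParams.toString()}` : ''
// paramsString === '?query=docs&search-overlay-ask-ai=true'
```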

Some files were not shown because too many files have changed in this diff.