
Enable github/array-foreach ESLint rule by converting all forEach to for loops (#58302)

Co-authored-by: Robert Sese <734194+rsese@users.noreply.github.com>
Kevin Heis authored 2025-11-12 11:18:21 -08:00 · committed by GitHub
commit 8adc699635 (parent 86216fb38b)
124 changed files with 777 additions and 718 deletions

View File

@@ -96,7 +96,6 @@ export default [
camelcase: 'off', // Many gh apis use underscores, 600+ uses
// Disabled rules to review
'github/array-foreach': 'off', // 250+
'no-console': 'off', // 800+
'@typescript-eslint/no-explicit-any': 'off', // 1000+
},
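Removing the `'off'` override lets eslint-plugin-github's `array-foreach` rule take effect (it had been parked under "Disabled rules to review" with 250+ hits). A minimal sketch of what the rule flags and the conversion applied throughout this commit, using placeholder data rather than repository code:

```ts
const items = ['a', 'b', 'c']

// Flagged by github/array-foreach:
items.forEach((item) => console.log(item))

// Preferred form, used for every conversion in this commit:
for (const item of items) {
  console.log(item)
}
```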

View File

@@ -48,9 +48,9 @@ const editorTypes: EditorTypes = {
const refinementDescriptions = (): string => {
let str = '\n\n'
Object.entries(editorTypes).forEach(([ed, edObj]) => {
for (const [ed, edObj] of Object.entries(editorTypes)) {
str += ` ${ed.padEnd(12)} ${edObj.description}\n`
})
}
return str
}
@@ -155,10 +155,10 @@ async function callEditor(
const prompt = yaml.load(fs.readFileSync(promptTemplatePath, 'utf8')) as PromptData
prompt.messages.forEach((msg) => {
for (const msg of prompt.messages) {
msg.content = msg.content.replace('{{markdownPrompt}}', markdownPrompt)
msg.content = msg.content.replace('{{input}}', content)
})
}
return callModelsApi(prompt)
}
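A standalone sketch of the `Object.entries` conversion above; `editorTypes` is reduced to a placeholder shape and made-up entries rather than the real `EditorTypes` from this file:

```ts
const editorTypes: Record<string, { description: string }> = {
  vscode: { description: 'Visual Studio Code' }, // hypothetical entries
  vim: { description: 'Vim / Neovim' },
}

function refinementDescriptions(): string {
  let str = '\n\n'
  // for...of over Object.entries replaces the forEach over the same entries
  for (const [ed, edObj] of Object.entries(editorTypes)) {
    str += `  ${ed.padEnd(12)} ${edObj.description}\n`
  }
  return str
}

console.log(refinementDescriptions())
```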

View File

@@ -117,7 +117,7 @@ function extractExample(commentBlock: string): string {
function generateMarkdown(apiDocs: any[]): string {
let markdown = '## Reference: API endpoints\n\n'
apiDocs.forEach((doc) => {
for (const doc of apiDocs) {
markdown += `### ${doc.method.toUpperCase()} ${doc.path}\n\n`
markdown += `${doc.description}\n\n`
@@ -142,7 +142,7 @@ function generateMarkdown(apiDocs: any[]): string {
}
markdown += '---\n\n'
})
}
return markdown
}

View File

@@ -41,22 +41,16 @@ describe.each(allVersionKeys)('pagelist api for %s', async (versionKey) => {
expression = new RegExp(`/\\w{2}(/${versionKey})?/?.*`)
else expression = new RegExp(`/\\w{2}/${versionKey}/?.*`)
res.body
.trim()
.split('\n')
.forEach((permalink: string) => {
expect(permalink).toMatch(expression)
})
for (const permalink of res.body.trim().split('\n')) {
expect(permalink).toMatch(expression)
}
})
test('English requests only returns urls that contain /en', async () => {
const expression = new RegExp(`^/en(/${nonEnterpriseDefaultVersion})?/?.*`)
res.body
.trim()
.split('\n')
.forEach((permalink: string) => {
expect(permalink).toMatch(expression)
})
for (const permalink of res.body.trim().split('\n')) {
expect(permalink).toMatch(expression)
}
})
})
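The pattern in these test hunks: when the `forEach` hung off a method chain, the whole chained expression moves unchanged into the `for...of` head. A sketch with made-up `body` and `expression` values:

```ts
const body = '/en/foo\n/en/bar'
const expression = /^\/en\/?.*/

// The .trim().split('\n') chain becomes the iterable of the loop head
for (const permalink of body.trim().split('\n')) {
  if (!expression.test(permalink)) {
    throw new Error(`Unexpected permalink: ${permalink}`)
  }
}
```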

View File

@@ -25,9 +25,7 @@ const images = await Promise.all(
return { relativePath, width, height, size }
}),
)
images
.sort((a, b) => b.size - a.size)
.forEach((image) => {
const { relativePath, width, height } = image
console.log(`${width} x ${height} - ${relativePath}`)
})
for (const image of images.sort((a, b) => b.size - a.size)) {
const { relativePath, width, height } = image
console.log(`${width} x ${height} - ${relativePath}`)
}

View File

@@ -317,14 +317,14 @@ export async function filterAndUpdateGhesDataByAllowlistValues({
// Categorizes the given array of audit log events by event category
function categorizeEvents(events: AuditLogEventT[]) {
const categorizedEvents: CategorizedEvents = {}
events.forEach((event) => {
for (const event of events) {
const [category] = event.action.split('.')
if (!Object.hasOwn(categorizedEvents, category)) {
categorizedEvents[category] = []
}
categorizedEvents[category].push(event)
})
}
return categorizedEvents
}
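A generic form of the `categorizeEvents` grouping loop above, with `AuditLogEventT` reduced to the single field the loop reads:

```ts
type EventLike = { action: string }

function categorize(events: EventLike[]): Record<string, EventLike[]> {
  const categorized: Record<string, EventLike[]> = {}
  for (const event of events) {
    // Category is the prefix before the first dot, e.g. "repo" in "repo.create"
    const [category] = event.action.split('.')
    if (!Object.hasOwn(categorized, category)) {
      categorized[category] = []
    }
    categorized[category].push(event)
  }
  return categorized
}
```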

View File

@@ -180,7 +180,7 @@ async function main() {
await mkdirp(auditLogVersionDirPath)
}
Object.values(AUDIT_LOG_PAGES).forEach(async (page) => {
for (const page of Object.values(AUDIT_LOG_PAGES)) {
const auditLogSchemaFilePath = path.join(auditLogVersionDirPath, `${page}.json`)
if (auditLogData[version][page]) {
@@ -188,9 +188,8 @@ async function main() {
auditLogSchemaFilePath,
JSON.stringify(auditLogData[version][page], null, 2),
)
console.log(`✅ Wrote ${auditLogSchemaFilePath}`)
}
})
}
}
}
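This hunk also removes a latent hazard, not just a lint violation: `Array.prototype.forEach` never awaits an async callback, so the original `forEach(async (page) => ...)` fired its work fire-and-forget, and rejections would surface as unhandled. A sketch of the difference, where `write` stands in for whatever awaited work the callback did:

```ts
// `write` is a hypothetical async writer, not repository code
async function writeAll(pages: string[], write: (page: string) => Promise<void>) {
  // Before: callbacks fire without being awaited; writeAll() can resolve
  // before any write finishes, and errors escape the function.
  // pages.forEach(async (page) => { await write(page) })

  // After: each iteration completes (or throws) before the next begins.
  for (const page of pages) {
    await write(page)
  }
}
```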

View File

@@ -31,10 +31,10 @@ describe('Audit log fields functionality', () => {
if (eventWithFields) {
expect(Array.isArray(eventWithFields.fields)).toBe(true)
eventWithFields.fields!.forEach((field) => {
for (const field of eventWithFields.fields!) {
expect(typeof field).toBe('string')
expect(field.length).toBeGreaterThan(0)
})
}
}
})
@@ -42,14 +42,14 @@ describe('Audit log fields functionality', () => {
// Some events might not have fields, this should not break anything
const events = getAuditLogEvents('organization', 'enterprise-cloud@latest')
events.forEach((event) => {
for (const event of events) {
expect(event).toHaveProperty('action')
expect(event).toHaveProperty('description')
// fields property is optional
if (event.fields) {
expect(Array.isArray(event.fields)).toBe(true)
}
})
}
})
test('should include common audit log fields', () => {
@@ -82,19 +82,19 @@ describe('Audit log fields functionality', () => {
expect(categories.length).toBeGreaterThan(0)
// Check that events in categories have proper structure including fields
categories.forEach((category) => {
for (const category of categories) {
const events = categorizedEvents[category]
expect(Array.isArray(events)).toBe(true)
events.forEach((event: AuditLogEventT) => {
for (const event of events as AuditLogEventT[]) {
expect(event).toHaveProperty('action')
expect(event).toHaveProperty('description')
// fields is optional but if present should be array
if (event.fields) {
expect(Array.isArray(event.fields)).toBe(true)
}
})
})
}
}
})
test('should preserve fields data through categorization', () => {
@@ -127,12 +127,12 @@ describe('Audit log fields functionality', () => {
test('should not have duplicate fields in same event', () => {
const events = getAuditLogEvents('organization', 'enterprise-cloud@latest')
events.forEach((event) => {
for (const event of events) {
if (event.fields) {
const uniqueFields = new Set(event.fields)
expect(uniqueFields.size).toBe(event.fields.length)
}
})
}
})
test('should have reasonable field names', () => {
@@ -140,7 +140,7 @@ describe('Audit log fields functionality', () => {
const eventWithFields = events.find((event) => event.fields && event.fields.length > 0)
if (eventWithFields) {
eventWithFields.fields!.forEach((field) => {
for (const field of eventWithFields.fields!) {
// Field names should be reasonable strings
expect(field).toBeTruthy()
expect(typeof field).toBe('string')
@@ -149,33 +149,33 @@ describe('Audit log fields functionality', () => {
// Should not contain special characters that would break display
expect(field).not.toMatch(/[<>'"&]/)
})
}
}
})
test('should handle different page types consistently', () => {
const pageTypes = ['organization', 'enterprise', 'user']
pageTypes.forEach((pageType) => {
for (const pageType of pageTypes) {
try {
const events = getAuditLogEvents(pageType, 'enterprise-cloud@latest')
events.forEach((event) => {
for (const event of events) {
expect(event).toHaveProperty('action')
expect(event).toHaveProperty('description')
if (event.fields) {
expect(Array.isArray(event.fields)).toBe(true)
event.fields.forEach((field) => {
for (const field of event.fields) {
expect(typeof field).toBe('string')
})
}
}
})
}
} catch (error) {
// Some page types might not exist for certain versions, that's ok
console.log(`Skipping ${pageType} page type due to: ${error}`)
}
})
}
})
})
@@ -194,9 +194,9 @@ describe('Audit log fields functionality', () => {
if (fields) {
expect(Array.isArray(fields)).toBe(true)
fields.forEach((field) => {
for (const field of fields) {
expect(typeof field).toBe('string')
})
}
}
})
})

View File

@@ -16,10 +16,10 @@ describe('audit log category notes', () => {
test('category notes are strings', () => {
if (config.categoryNotes) {
Object.values(config.categoryNotes).forEach((note) => {
for (const note of Object.values(config.categoryNotes)) {
expect(typeof note).toBe('string')
expect(note.length).toBeGreaterThan(0)
})
}
}
})
@@ -51,13 +51,13 @@ describe('audit log category notes', () => {
expect(Object.keys(enterpriseEvents).length).toBeGreaterThan(0)
// Each category should still contain arrays of events
Object.values(organizationEvents).forEach((events) => {
for (const events of Object.values(organizationEvents)) {
expect(Array.isArray(events)).toBe(true)
if (events.length > 0) {
expect(events[0]).toHaveProperty('action')
expect(events[0]).toHaveProperty('description')
}
})
}
})
test('category notes are properly typed', () => {

View File

@@ -399,7 +399,9 @@ async function getIndexFileVersions(
throw new Error(`Frontmatter in ${filepath} does not contain versions.`)
}
const fmVersions = getApplicableVersions(data.versions)
fmVersions.forEach((version: string) => versions.add(version))
for (const version of fmVersions) {
versions.add(version)
}
}),
)
const versionArray = [...versions]
@@ -431,7 +433,7 @@ export async function convertVersionsToFrontmatter(
// Currently, only GHES is numbered. Number releases have to be
// handled differently because they use semantic versioning.
versions.forEach((version) => {
for (const version of versions) {
const docsVersion = allVersions[version]
if (!docsVersion.hasNumberedReleases) {
frontmatterVersions[docsVersion.shortName] = '*'
@@ -455,10 +457,10 @@ export async function convertVersionsToFrontmatter(
numberedReleases[docsVersion.shortName].availableReleases[i] = docsVersion.currentRelease
}
}
})
}
// Create semantic versions for numbered releases
Object.keys(numberedReleases).forEach((key) => {
for (const key of Object.keys(numberedReleases)) {
const availableReleases = numberedReleases[key].availableReleases
const versionContinuity = checkVersionContinuity(availableReleases)
if (availableReleases.every(Boolean)) {
@@ -483,7 +485,7 @@ export async function convertVersionsToFrontmatter(
}
frontmatterVersions[key] = semVer.join(' ')
}
})
}
const sortedFrontmatterVersions = Object.keys(frontmatterVersions)
.sort()
.reduce((acc: { [key: string]: string }, key) => {

View File

@@ -88,9 +88,9 @@ describe('automated content directory updates', () => {
// Because of that, we need to update the content paths to use the
// full file path.
const contentDataFullPath: { [key: string]: ContentItem } = {}
Object.keys(newContentData).forEach(
(key: string) => (contentDataFullPath[path.join(targetDirectory, key)] = newContentData[key]),
)
for (const key of Object.keys(newContentData)) {
contentDataFullPath[path.join(targetDirectory, key)] = newContentData[key]
}
// Rewrites the content directory in the operating system's
// temp directory.

View File

@@ -130,11 +130,11 @@ export async function convertContentToDocs(
// There are some keywords like [Plumbing] used by the code comments
// but we don't want to render them in the docs.
if (node.type === 'text' && node.value) {
removeKeywords.forEach((keyword) => {
for (const keyword of removeKeywords) {
if (node.value.includes(keyword)) {
node.value = node.value.replace(keyword, '').trim()
}
})
}
}
// The subsections under the main headings (level 2) are commands

View File

@@ -82,13 +82,13 @@ export async function getLintableYml(dataFilePath: string): Promise<Record<strin
// 'data/variables/product.yml /pat_v1_caps'
function addPathToKey(mdDictMap: Map<string, string>, dataFilePath: string): Map<string, string> {
const keys = Array.from(mdDictMap.keys())
keys.forEach((key) => {
for (const key of keys) {
const newKey = `${dataFilePath} ${key}`
const value = mdDictMap.get(key)
if (value !== undefined) {
mdDictMap.delete(key)
mdDictMap.set(newKey, value)
}
})
}
return mdDictMap
}

View File

@@ -24,7 +24,7 @@ interface ProcessedValidationError {
export function formatAjvErrors(errors: AjvValidationError[] = []): ProcessedValidationError[] {
const processedErrors: ProcessedValidationError[] = []
errors.forEach((errorObj: AjvValidationError) => {
for (const errorObj of errors) {
const error: Partial<ProcessedValidationError> = {}
error.instancePath =
@@ -58,7 +58,7 @@ export function formatAjvErrors(errors: AjvValidationError[] = []): ProcessedVal
}
processedErrors.push(error as ProcessedValidationError)
})
}
return processedErrors
}

View File

@@ -114,11 +114,12 @@ export function filterTokensByOrder(
// first token (root) in the tokenOrder array
const tokenRootIndexes: number[] = []
const firstTokenOrderType = tokenOrder[0]
tokens.forEach((token, index) => {
for (let index = 0; index < tokens.length; index++) {
const token = tokens[index]
if (token.type === firstTokenOrderType) {
tokenRootIndexes.push(index)
}
})
}
// Loop through each root token index and check if
// the order matches the tokenOrder array
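When the `forEach` callback used its index parameter, the conversion here is a counted `for` loop instead of `for...of`. A reduced sketch of the shape above (an equivalent alternative would be `for (const [index, token] of tokens.entries())`):

```ts
type TokenLike = { type: string }

function rootIndexes(tokens: TokenLike[], firstType: string): number[] {
  const tokenRootIndexes: number[] = []
  // Counted loop preserves access to `index`, which the callback relied on
  for (let index = 0; index < tokens.length; index++) {
    if (tokens[index].type === firstType) {
      tokenRootIndexes.push(index)
    }
  }
  return tokenRootIndexes
}
```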

View File

@@ -17,9 +17,10 @@ export const codeAnnotationCommentSpacing = {
const lines = content.split('\n')
lines.forEach((line: string, index: number) => {
for (let index = 0; index < lines.length; index++) {
const line: string = lines[index]
const trimmedLine = line.trim()
if (!trimmedLine) return
if (!trimmedLine) continue
// Define a map of comment patterns
const commentPatterns: Record<string, RegExp> = {
@@ -46,7 +47,7 @@ export const codeAnnotationCommentSpacing = {
if (commentMatch && restOfLine !== null && commentChar !== null) {
// Skip shebang lines (#!/...)
if (trimmedLine.startsWith('#!')) {
return
continue
}
// Allow empty comments or comments with exactly one space
@@ -75,7 +76,7 @@ export const codeAnnotationCommentSpacing = {
)
}
// Single space or empty - this is correct
return
continue
} else {
// No space after comment character - this is an error
const lineNumber: number = token.lineNumber + index + 1
@@ -97,7 +98,7 @@ export const codeAnnotationCommentSpacing = {
)
}
}
})
}
})
},
}
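The recurring mechanical detail in hunks like this one: a `return` inside a `forEach` callback skips only that element, so its loop equivalent is `continue`. Distilled with made-up input lines:

```ts
const lines = ['# a comment', '', '#!/usr/bin/env bash']

for (const line of lines) {
  const trimmed = line.trim()
  if (!trimmed) continue // was `return`: skip blank lines
  if (trimmed.startsWith('#!')) continue // was `return`: skip shebangs
  console.log(`would check comment spacing on: ${trimmed}`)
}
```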

View File

@@ -19,15 +19,15 @@ export const expiredContent: Rule = {
(token: MarkdownToken) => token.type === 'inline' || token.type === 'html_block',
)
tokensToCheck.forEach((token: MarkdownToken) => {
for (const token of tokensToCheck) {
// Looking for just opening tag with format:
// <!-- expires yyyy-mm-dd -->
const match = token.content?.match(/<!--\s*expires\s(\d\d\d\d)-(\d\d)-(\d\d)\s*-->/)
if (!match || !token.content) return
if (!match || !token.content) continue
const expireDate = new Date(match.splice(1, 3).join(' '))
const today = new Date()
if (today < expireDate) return
if (today < expireDate) continue
// We want the content split by line since not all token.content is in one line
// to get the correct range of the expired content. Below is how markdownlint
@@ -44,7 +44,7 @@ export const expiredContent: Rule = {
[startRange, match[0].length],
null, // No fix possible
)
})
}
},
}
@@ -68,11 +68,11 @@ export const expiringSoon: Rule = {
(token: MarkdownToken) => token.type === 'inline' || token.type === 'html_block',
)
tokensToCheck.forEach((token: MarkdownToken) => {
for (const token of tokensToCheck) {
// Looking for just opening tag with format:
// <!-- expires yyyy-mm-dd -->
const match = token.content?.match(/<!--\s*expires\s(\d\d\d\d)-(\d\d)-(\d\d)\s*-->/)
if (!match || !token.content) return
if (!match || !token.content) continue
const expireDate = new Date(match.splice(1, 3).join(' '))
const today = new Date()
@@ -80,7 +80,7 @@ export const expiringSoon: Rule = {
futureDate.setDate(today.getDate() + DAYS_TO_WARN_BEFORE_EXPIRED)
// Don't set warning if the content is already expired or
// if the content expires later than the DAYS_TO_WARN_BEFORE_EXPIRED
if (today > expireDate || expireDate > futureDate) return
if (today > expireDate || expireDate > futureDate) continue
addError(
onError,
@@ -90,6 +90,6 @@ export const expiringSoon: Rule = {
[token.content.indexOf(match[0]) + 1, match[0].length],
null, // No fix possible
)
})
}
},
}

View File

@@ -81,7 +81,7 @@ export const frontmatterLandingRecommended = {
const duplicates: string[] = []
const invalidPaths: string[] = []
fm.recommended.forEach((item: string) => {
for (const item of fm.recommended) {
if (seen.has(item)) {
duplicates.push(item)
} else {
@@ -92,7 +92,7 @@ export const frontmatterLandingRecommended = {
if (!isValidArticlePath(item, params.name)) {
invalidPaths.push(item)
}
})
}
if (duplicates.length > 0) {
addError(

View File

@@ -23,8 +23,8 @@ export const frontmatterVersionsWhitespace: Rule = {
if (fmStartIndex === -1) return
// Check each version entry for whitespace issues
Object.entries(versionsObj).forEach(([key, value]) => {
if (typeof value !== 'string') return
for (const [key, value] of Object.entries(versionsObj)) {
if (typeof value !== 'string') continue
const hasUnwantedWhitespace = checkForUnwantedWhitespace(value)
if (hasUnwantedWhitespace) {
@@ -55,7 +55,7 @@ export const frontmatterVersionsWhitespace: Rule = {
)
}
}
})
}
},
}

View File

@@ -63,9 +63,11 @@ export const journeyTracksGuidePathExists = {
const journeyTracksLineNumber = params.lines.indexOf(journeyTracksLine) + 1
fm.journeyTracks.forEach((track: any, trackIndex: number) => {
for (let trackIndex = 0; trackIndex < fm.journeyTracks.length; trackIndex++) {
const track: any = fm.journeyTracks[trackIndex]
if (track.guides && Array.isArray(track.guides)) {
track.guides.forEach((guide: string, guideIndex: number) => {
for (let guideIndex = 0; guideIndex < track.guides.length; guideIndex++) {
const guide: string = track.guides[guideIndex]
if (typeof guide === 'string') {
if (!isValidGuidePath(guide, params.name)) {
addError(
@@ -76,8 +78,8 @@ export const journeyTracksGuidePathExists = {
)
}
}
})
}
}
})
}
},
}

View File

@@ -22,7 +22,8 @@ export const journeyTracksLiquid = {
? params.lines.indexOf(journeyTracksLine) + 1
: 1
fm.journeyTracks.forEach((track: any, trackIndex: number) => {
for (let trackIndex = 0; trackIndex < fm.journeyTracks.length; trackIndex++) {
const track: any = fm.journeyTracks[trackIndex]
// Try to find the line number for this specific journey track so we can use that for the error
// line number. Getting the exact line number is probably more work than it's worth for this
// particular rule.
@@ -57,7 +58,7 @@ export const journeyTracksLiquid = {
{ name: 'description', value: track.description },
]
properties.forEach((prop) => {
for (const prop of properties) {
if (prop.value && typeof prop.value === 'string') {
try {
liquid.parse(prop.value)
@@ -70,10 +71,11 @@ export const journeyTracksLiquid = {
)
}
}
})
}
if (track.guides && Array.isArray(track.guides)) {
track.guides.forEach((guide: string, guideIndex: number) => {
for (let guideIndex = 0; guideIndex < track.guides.length; guideIndex++) {
const guide: string = track.guides[guideIndex]
if (typeof guide === 'string') {
try {
liquid.parse(guide)
@@ -86,8 +88,8 @@ export const journeyTracksLiquid = {
)
}
}
})
}
}
})
}
},
}

View File

@@ -48,11 +48,12 @@ export const journeyTracksUniqueIds = {
// Track seen journey track IDs and line number for error reporting
const seenIds = new Map<string, number>()
fm.journeyTracks.forEach((track: any, index: number) => {
if (!track || typeof track !== 'object') return
for (let index = 0; index < fm.journeyTracks.length; index++) {
const track: any = fm.journeyTracks[index]
if (!track || typeof track !== 'object') continue
const trackId = track.id
if (!trackId || typeof trackId !== 'string') return
if (!trackId || typeof trackId !== 'string') continue
const currentLineNumber = getTrackLineNumber(index)
@@ -66,6 +67,6 @@ export const journeyTracksUniqueIds = {
} else {
seenIds.set(trackId, currentLineNumber)
}
})
}
},
}

View File

@@ -128,7 +128,7 @@ function validateIfversionConditionals(cond: string, possibleVersionNames: Set<s
// Note that Lengths 1 and 2 may be used with feature-based versioning, but NOT Length 3.
const condParts = cond.split(/ (or|and) /).filter((part) => !(part === 'or' || part === 'and'))
condParts.forEach((str) => {
for (const str of condParts) {
const strParts = str.split(' ')
// if length = 1, this should be a valid short version or feature version name.
if (strParts.length === 1) {
@@ -192,7 +192,7 @@ function validateIfversionConditionals(cond: string, possibleVersionNames: Set<s
)
}
}
})
}
return errors
}

View File

@@ -54,7 +54,7 @@ export const thirdPartyActionPinning: Rule = {
const steps = getWorkflowSteps(yamlObj)
if (!steps.some((step) => step.uses)) return
steps.forEach((step) => {
for (const step of steps) {
if (step.uses) {
const actionMatch = step.uses.match(actionRegex)
if (actionMatch) {
@@ -71,7 +71,7 @@ export const thirdPartyActionPinning: Rule = {
}
}
}
})
}
} catch (e) {
if (e instanceof yaml.YAMLException) {
console.log('YAML Exception file:', params.name)

View File

@@ -43,7 +43,7 @@ export const yamlScheduledJobs: Rule = {
if (!yamlObj.on) return
if (!yamlObj.on.schedule) return
yamlObj.on.schedule.forEach((schedule: YamlSchedule) => {
for (const schedule of yamlObj.on.schedule) {
if (schedule.cron.split(' ')[0] === '0') {
addError(
onError,
@@ -57,13 +57,13 @@ export const yamlScheduledJobs: Rule = {
addError(
onError,
getLineNumber(token.content!, schedule.cron) + token.lineNumber,
`YAML scheduled workflow must be unique`,
`YAML scheduled workflow must not use the same cron schedule as another workflow`,
schedule.cron,
)
} else {
scheduledYamlJobs.push(schedule.cron)
}
})
scheduledYamlJobs.push(schedule.cron)
}
})
},
}
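Alongside the loop conversion, this hunk reworks the duplicate-cron bookkeeping: the error message is made specific, and every cron is now recorded rather than only non-duplicates, so a schedule repeated three times is flagged on both its second and third uses. A hypothetical reduction, assuming an `includes`-style duplicate check (the check itself sits outside the hunk):

```ts
const scheduledYamlJobs: string[] = []

function checkCron(cron: string): string | null {
  const isDuplicate = scheduledYamlJobs.includes(cron) // assumed check
  scheduledYamlJobs.push(cron) // now recorded unconditionally
  return isDuplicate
    ? 'YAML scheduled workflow must not use the same cron schedule as another workflow'
    : null
}
```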

View File

@@ -58,20 +58,22 @@ childProcess.on('close', (code: number | null) => {
)
console.log(`${Object.values(markdownViolations).flat().length} violations found.`)
Object.entries(markdownViolations).forEach(
([fileName, results]: [string, Array<{ lineNumber: number }>]) => {
console.log(fileName)
console.log(results)
const fileLines = fs.readFileSync(fileName, 'utf8').split('\n')
results.forEach((result) => {
matchingRulesFound++
const lineIndex = result.lineNumber - 1
const offendingLine = fileLines[lineIndex]
fileLines[lineIndex] = offendingLine.concat(` <!-- markdownlint-disable-line ${rule} -->`)
})
fs.writeFileSync(fileName, fileLines.join('\n'), 'utf8')
},
)
const violationEntries = Object.entries(markdownViolations) as [
string,
Array<{ lineNumber: number }>,
][]
for (const [fileName, results] of violationEntries) {
console.log(fileName)
console.log(results)
const fileLines = fs.readFileSync(fileName, 'utf8').split('\n')
for (const result of results) {
matchingRulesFound++
const lineIndex = result.lineNumber - 1
const offendingLine = fileLines[lineIndex]
fileLines[lineIndex] = offendingLine.concat(` <!-- markdownlint-disable-line ${rule} -->`)
}
fs.writeFileSync(fileName, fileLines.join('\n'), 'utf8')
}
console.log(`${matchingRulesFound} violations ignored.`)
})
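A distilled sketch of the rewritten block above: append an inline markdownlint suppression to each offending line, then write the file back once per file (`rule` is the rule name captured earlier in this script):

```ts
import fs from 'fs'

function suppressViolations(
  fileName: string,
  results: Array<{ lineNumber: number }>,
  rule: string,
): void {
  const fileLines = fs.readFileSync(fileName, 'utf8').split('\n')
  for (const result of results) {
    const lineIndex = result.lineNumber - 1
    fileLines[lineIndex] += ` <!-- markdownlint-disable-line ${rule} -->`
  }
  fs.writeFileSync(fileName, fileLines.join('\n'), 'utf8')
}
```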

View File

@@ -197,7 +197,7 @@ async function main() {
customRules: configuredRules.yml,
})) as LintResults
Object.entries(resultYmlFile).forEach(([key, value]) => {
for (const [key, value] of Object.entries(resultYmlFile)) {
if ((value as LintError[]).length) {
const errors = (value as LintError[]).map((error) => {
// Autofixing would require us to write the changes back to the YML
@@ -209,7 +209,7 @@ async function main() {
})
resultYml[key] = errors
}
})
}
}
// There are no collisions when assigning the results to the new object
@@ -219,10 +219,10 @@ async function main() {
// Merge in the results for frontmatter tests, which could be
// in a file that already exists as a key in the `results` object.
Object.entries(resultFrontmatter).forEach(([key, value]) => {
for (const [key, value] of Object.entries(resultFrontmatter)) {
if (results[key]) results[key].push(...(value as LintError[]))
else results[key] = value as LintError[]
})
}
// Apply markdownlint fixes if available and rewrite the files
let countFixedFiles = 0
@@ -476,7 +476,7 @@ function reportSummaryByRule(results: LintResults, config: LintConfig): void {
// the default property is not actually a rule
delete ruleCount.default
Object.keys(results).forEach((key) => {
for (const key of Object.keys(results)) {
if (results[key].length > 0) {
for (const flaw of results[key]) {
const ruleName = flaw.ruleNames[1]
@@ -485,7 +485,7 @@ function reportSummaryByRule(results: LintResults, config: LintConfig): void {
ruleCount[ruleName] = count + 1
}
}
})
}
}
/*
@@ -498,26 +498,26 @@ function getFormattedResults(
isInPrecommitMode: boolean,
): FormattedResults {
const output: FormattedResults = {}
Object.entries(allResults)
const filteredResults = Object.entries(allResults)
// Each result key always has an array value, but it may be empty
.filter(([, results]) => results.length)
.forEach(([key, fileResults]) => {
if (verbose) {
output[key] = fileResults.map((flaw: LintError) => formatResult(flaw, isInPrecommitMode))
} else {
const formattedResults = fileResults.map((flaw: LintError) =>
formatResult(flaw, isInPrecommitMode),
)
for (const [key, fileResults] of filteredResults) {
if (verbose) {
output[key] = fileResults.map((flaw: LintError) => formatResult(flaw, isInPrecommitMode))
} else {
const formattedResults = fileResults.map((flaw: LintError) =>
formatResult(flaw, isInPrecommitMode),
)
// Only add the file to output if there are results after filtering
if (formattedResults.length > 0) {
const errors = formattedResults.filter((result) => result.severity === 'error')
const warnings = formattedResults.filter((result) => result.severity === 'warning')
const sortedResult = [...errors, ...warnings]
output[key] = [...sortedResult]
}
// Only add the file to output if there are results after filtering
if (formattedResults.length > 0) {
const errors = formattedResults.filter((result) => result.severity === 'error')
const warnings = formattedResults.filter((result) => result.severity === 'warning')
const sortedResult = [...errors, ...warnings]
output[key] = [...sortedResult]
}
})
}
}
return output
}

View File

@@ -46,9 +46,9 @@ describe.skip('category pages', () => {
// Combine those to fit vitest's `.each` usage
const productTuples = zip(productNames, productIndices) as [string, string][]
// Use a regular forEach loop to generate the `describe(...)` blocks
// Use a regular for...of loop to generate the `describe(...)` blocks
// otherwise, if one of them has no categories, the tests will fail.
productTuples.forEach((tuple) => {
for (const tuple of productTuples) {
const [, productIndex] = tuple
// Get links included in product index page.
// Each link corresponds to a product subdirectory (category).
@@ -196,11 +196,11 @@ describe.skip('category pages', () => {
})
test('contains only articles and subcategories with versions that are also available in the parent category', () => {
Object.entries(articleVersions).forEach(([articleName, versions]) => {
for (const [articleName, versions] of Object.entries(articleVersions)) {
const unexpectedVersions = difference(versions, categoryVersions)
const errorMessage = `${articleName} has versions that are not available in parent category`
expect(unexpectedVersions.length, errorMessage).toBe(0)
})
}
})
test('slugified title matches parent directory name', () => {
@@ -229,7 +229,7 @@ describe.skip('category pages', () => {
})
},
)
})
}
})
function getPath(productDir: string, link: string, filename: string) {

View File

@@ -30,14 +30,14 @@ describe('lint learning tracks', () => {
// Using any[] for toLint since it contains mixed string content from various YAML properties
const toLint: any[] = []
// Using any for destructured params as YAML structure varies across different learning track files
Object.values(yamlContent).forEach(({ title, description }: any) => {
for (const { title, description } of Object.values(yamlContent) as any[]) {
toLint.push(title)
toLint.push(description)
})
}
toLint.forEach((element) => {
for (const element of toLint) {
expect(() => liquid.parse(element), `${element} contains invalid liquid`).not.toThrow()
})
}
})
})
})

View File

@@ -43,14 +43,14 @@ describe('data references', () => {
variables.map(async (variablesPerFile) => {
const variableRefs = getDataReferences(JSON.stringify(variablesPerFile))
variableRefs.forEach((key: string) => {
for (const key of variableRefs) {
const value = getDataByLanguage(key, 'en')
if (typeof value !== 'string') {
const filename = getFilenameByValue(allVariables, variablesPerFile)
const variableFile = path.join('data/variables', filename || '')
errors.push({ key, value, variableFile })
}
})
}
}),
)

View File

@@ -153,7 +153,7 @@ This is a test.
describe(frontmatterVersionsWhitespace.names.join(' - '), () => {
describe('valid cases', () => {
validCases.forEach(({ name, content }) => {
for (const { name, content } of validCases) {
test(`${name} should pass`, async () => {
const result = await runRule(frontmatterVersionsWhitespace, {
strings: { content },
@@ -161,11 +161,11 @@ describe(frontmatterVersionsWhitespace.names.join(' - '), () => {
})
expect(result.content.length).toBe(0)
})
})
}
})
describe('invalid cases', () => {
invalidCases.forEach(({ name, content, expectedErrors, expectedMessage }) => {
for (const { name, content, expectedErrors, expectedMessage } of invalidCases) {
test(`${name} should fail`, async () => {
const result = await runRule(frontmatterVersionsWhitespace, {
strings: { content },
@@ -177,7 +177,7 @@ describe(frontmatterVersionsWhitespace.names.join(' - '), () => {
expect(result.content[0].errorDetail).toBe(expectedMessage)
}
})
})
}
})
describe('fixable errors', () => {

View File

@@ -99,10 +99,11 @@ async function selectFromOptions(
promptFn: (question: string) => Promise<string>,
): Promise<string> {
console.log(chalk.yellow(`\n${message} (${paramName}):`))
options.forEach((option, index) => {
for (let index = 0; index < options.length; index++) {
const option = options[index]
const letter = String.fromCharCode(97 + index) // 97 is 'a' in ASCII
console.log(chalk.white(` ${letter}. ${option}`))
})
}
let attempts = 0
while (true) {
@@ -201,11 +202,11 @@ function validateCTAParams(params: CTAParams): { isValid: boolean; errors: strin
function buildCTAUrl(baseUrl: string, params: CTAParams): string {
const url = new URL(baseUrl)
Object.entries(params).forEach(([key, value]) => {
for (const [key, value] of Object.entries(params)) {
if (value) {
url.searchParams.set(key, value)
}
})
}
return url.toString()
}
@@ -277,11 +278,11 @@ export function convertOldCTAUrl(oldUrl: string): { newUrl: string; notes: strin
newUrl.searchParams.delete('ref_page')
// Add new CTA parameters
Object.entries(newParams).forEach(([key, value]) => {
for (const [key, value] of Object.entries(newParams)) {
if (value) {
newUrl.searchParams.set(key, value)
}
})
}
// The URL constructor may add a slash before the question mark in
// "github.com?foo", but we don't want that. First, check if original
@@ -417,7 +418,9 @@ async function interactiveBuilder(): Promise<void> {
if (!validation.isValid) {
console.log(chalk.red('\n❌ Validation Errors:'))
validation.errors.forEach((error) => console.log(chalk.red(`${error}`)))
for (const error of validation.errors) {
console.log(chalk.red(`${error}`))
}
rl.close()
return
}
@@ -428,11 +431,11 @@ async function interactiveBuilder(): Promise<void> {
console.log(chalk.green('\n✅ CTA URL generated successfully!'))
console.log(chalk.white.bold('\nParameters summary:'))
Object.entries(params).forEach(([key, value]) => {
for (const [key, value] of Object.entries(params)) {
if (value) {
console.log(chalk.white(` ${key}: ${value}`))
}
})
}
console.log(chalk.white.bold('\nYour CTA URL:'))
console.log(chalk.cyan(ctaUrl))
@@ -474,7 +477,9 @@ async function convertUrls(options: { url?: string; quiet?: boolean }): Promise<
if (!validation.isValid) {
console.log(chalk.red('\n❌ Validation errors in converted URL:'))
validation.errors.forEach((message) => console.log(chalk.red(`${message}`)))
for (const message of validation.errors) {
console.log(chalk.red(`${message}`))
}
}
} catch (validationError) {
console.log(chalk.red(`\n❌ Failed to validate new URL: ${validationError}`))
@@ -482,7 +487,9 @@ async function convertUrls(options: { url?: string; quiet?: boolean }): Promise<
if (result.notes.length) {
console.log(chalk.white('\n👉 Notes:'))
result.notes.forEach((note) => console.log(` ${note}`))
for (const note of result.notes) {
console.log(` ${note}`)
}
}
} else {
if (!options.quiet) {
@@ -534,12 +541,14 @@ async function validateUrl(options: { url?: string }): Promise<void> {
if (validation.isValid) {
console.log(chalk.green('\n✅ URL is valid'))
console.log(chalk.white('\nCTA parameters found:'))
Object.entries(ctaParams).forEach(([key, value]) => {
for (const [key, value] of Object.entries(ctaParams)) {
console.log(chalk.white(` ${key}: ${value}`))
})
}
} else {
console.log(chalk.red('\n❌ Validation errors:'))
validation.errors.forEach((message) => console.log(chalk.red(`${message}`)))
for (const message of validation.errors) {
console.log(chalk.red(`${message}`))
}
console.log(
chalk.yellow(
'\n💡 Try: npm run cta-builder -- convert --url "your-url" to auto-fix old format URLs',
@@ -596,9 +605,9 @@ async function buildProgrammaticCTA(options: {
const validation = validateCTAParams(params)
if (!validation.isValid) {
// Output validation errors to stderr and exit with error code
validation.errors.forEach((error) => {
for (const error of validation.errors) {
console.error(`Validation error: ${error}`)
})
}
process.exit(1)
}

View File

@@ -370,7 +370,9 @@ function removeFromChildren(oldPath: string, opts: MoveOptions): PositionInfo {
const childGroupPositions: number[][] = []
;((data && data[CHILDGROUPS_KEY]) || []).forEach((group: any, i: number) => {
const childGroups = (data && data[CHILDGROUPS_KEY]) || []
for (let i = 0; i < childGroups.length; i++) {
const group = childGroups[i]
if (group.children) {
group.children = group.children.filter((entry: any, j: number) => {
if (entry === oldName || entry === `/${oldName}`) {
@@ -380,7 +382,7 @@ function removeFromChildren(oldPath: string, opts: MoveOptions): PositionInfo {
return true
})
}
})
}
if (data) {
fs.writeFileSync(
@@ -449,10 +451,11 @@ function moveFiles(files: FileTuple[], opts: MoveOptions) {
for (const [oldPath] of files) {
const fileContent = fs.readFileSync(oldPath, 'utf-8')
const { errors } = fm(fileContent, { filepath: oldPath })
errors.forEach((error, i) => {
for (let i = 0; i < errors.length; i++) {
const error = errors[i]
if (!i) console.warn(chalk.yellow(`Error parsing file (${oldPath}) frontmatter:`))
console.error(`${chalk.red(error.message)}: ${chalk.yellow(error.reason)}`)
})
}
if (errors.length > 0) throw new Error('There were more than 0 parse errors')
}
@@ -668,12 +671,13 @@ function changeFeaturedLinks(oldHref: string, newHref: string): void {
if (key === 'popularHeading') {
continue
}
entries.forEach((entry, i) => {
for (let i = 0; i < entries.length; i++) {
const entry = entries[i]
if (regex.test(entry)) {
entries[i] = entry.replace(regex, `${newHref}$1`)
changed = true
}
})
}
}
if (changed) {

View File

@@ -133,11 +133,11 @@ describe('renderContent', () => {
const html = await renderContent(template)
const $ = cheerio.load(html, { xmlMode: true })
;[1, 2, 3, 4, 5].forEach((level) => {
for (const level of [1, 2, 3, 4, 5]) {
expect(
$(`h${level}#this-is-a-level-${level} a[href="#this-is-a-level-${level}"]`).length,
).toBe(1)
})
}
})
test('does syntax highlighting', async () => {

View File

@@ -54,7 +54,7 @@ export default function dataDirectory(
fs.readFileSync(filename, 'utf8'),
])
files.forEach(([filename, fileContent]) => {
for (const [filename, fileContent] of files) {
// derive `foo.bar.baz` object key from `foo/bar/baz.yml` filename
const key = filenameToKey(path.relative(dir, filename))
const extension = path.extname(filename).toLowerCase()
@@ -84,7 +84,7 @@ export default function dataDirectory(
setWith(data, key, matter(processedContent).content, Object)
break
}
})
}
return data
}

View File

@@ -72,7 +72,7 @@ const reusablesToMove: string[] = []
const imagesToMove: string[] = []
// 2. Add redirects to and update frontmatter in the to-be-migrated early access files BEFORE moving them.
filesToMigrate.forEach((filepath) => {
for (const filepath of filesToMigrate) {
const { content, data } = frontmatter(fs.readFileSync(filepath, 'utf8'))
const redirectString: string = filepath
.replace('content/', '/')
@@ -95,12 +95,18 @@ filesToMigrate.forEach((filepath) => {
variablesToMove.push(...variables)
reusablesToMove.push(...reusables)
imagesToMove.push(...images)
})
}
// 3. Move the data files and images.
Array.from(new Set(variablesToMove)).forEach((varRef) => moveVariable(varRef))
Array.from(new Set(reusablesToMove)).forEach((varRef) => moveReusable(varRef))
Array.from(new Set(imagesToMove)).forEach((imageRef) => moveImage(imageRef))
for (const varRef of Array.from(new Set(variablesToMove))) {
moveVariable(varRef)
}
for (const varRef of Array.from(new Set(reusablesToMove))) {
moveReusable(varRef)
}
for (const imageRef of Array.from(new Set(imagesToMove))) {
moveImage(imageRef)
}
// 4. Move the content files.
execFileSync('mv', [oldPath, migratePath])

View File

@@ -68,11 +68,11 @@ const destinationDirsMap: Record<string, string> = destinationDirNames.reduce(
)
// Remove all existing early access directories from this repo
destinationDirNames.forEach((dirName) => {
for (const dirName of destinationDirNames) {
const destDir = destinationDirsMap[dirName]
rimraf.sync(destDir)
console.log(`- Removed symlink for early access directory '${dirName}' from this repo`)
})
}
// If removing symlinks, just stop here!
if (unlink) {
@@ -84,8 +84,8 @@ if (unlink) {
//
// Move the latest early access source directories into this repo
destinationDirNames.forEach((dirName) => {
if (!earlyAccessLocalRepoDir) return
for (const dirName of destinationDirNames) {
if (!earlyAccessLocalRepoDir) continue
const sourceDir = path.join(earlyAccessLocalRepoDir, dirName)
const destDir = destinationDirsMap[dirName]
@@ -93,7 +93,7 @@ destinationDirNames.forEach((dirName) => {
// If the source directory doesn't exist, skip it
if (!fs.existsSync(sourceDir)) {
console.warn(`Early access directory '${dirName}' does not exist. Skipping...`)
return
continue
}
// Create a symbolic link to the directory
@@ -113,4 +113,4 @@ destinationDirNames.forEach((dirName) => {
}
console.log(`+ Added symlink for early access directory '${dirName}' into this repo`)
})
}

View File

@@ -48,7 +48,7 @@ if (earlyAccessPath) {
// We also need to include any reusable files that are referenced in the selected content files.
const referencedDataFiles: string[] = []
contentFiles.forEach((file) => {
for (const file of contentFiles) {
const contents = fs.readFileSync(file, 'utf8')
const dataRefs: string[] = contents.match(patterns.dataReference) || []
const filepaths: string[] = dataRefs
@@ -62,7 +62,7 @@ if (earlyAccessPath) {
return path.posix.join(process.cwd(), 'data', `${filepath}.md`)
})
referencedDataFiles.push(...filepaths)
})
}
const dataFiles = allEarlyAccessFiles.filter((file) => {
return referencedDataFiles.some((f) =>
@@ -74,7 +74,7 @@ if (earlyAccessPath) {
}
// Update the EA content and data files
selectedFiles.forEach((file) => {
for (const file of selectedFiles) {
const oldContents = fs.readFileSync(file, 'utf8')
const dataRefs: string[] = oldContents.match(patterns.dataReference) || []
@@ -83,58 +83,54 @@ selectedFiles.forEach((file) => {
const replacements: Record<string, string> = {}
if (add) {
dataRefs
// Since we're adding early-access to the path, filter for those that do not already include it
.filter((dataRef) => !dataRef.includes(' early-access.'))
// Since we're adding early-access to the path, filter for those that do not already include it
const dataRefsToAdd = dataRefs.filter((ref) => !ref.includes(' early-access.'))
for (const dataRef of dataRefsToAdd) {
// Add to the { oldRef: newRef } replacements object
.forEach((dataRef) => {
replacements[dataRef] = dataRef.replace(
/({% (?:data|indented_data_reference) )(.*)/,
'$1early-access.$2',
)
})
replacements[dataRef] = dataRef.replace(
/({% (?:data|indented_data_reference) )(.*)/,
'$1early-access.$2',
)
}
imageRefs
// Since we're adding early-access to the path, filter for those that do not already include it
.filter((imageRef) => !imageRef.split('/').includes('early-access'))
// Since we're adding early-access to the path, filter for those that do not already include it
const imageRefsToAdd = imageRefs.filter((ref) => !ref.split('/').includes('early-access'))
for (const imageRef of imageRefsToAdd) {
// Add to the { oldRef: newRef } replacements object
.forEach((imageRef) => {
replacements[imageRef] = imageRef.replace('/assets/images/', '/assets/images/early-access/')
})
replacements[imageRef] = imageRef.replace('/assets/images/', '/assets/images/early-access/')
}
}
if (remove) {
dataRefs
// Since we're removing early-access from the path, filter for those that include it
.filter((dataRef) => dataRef.includes(' early-access.'))
// Since we're removing early-access from the path, filter for those that include it
const dataRefsToRemove = dataRefs.filter((ref) => ref.includes(' early-access.'))
for (const dataRef of dataRefsToRemove) {
// Add to the { oldRef: newRef } replacements object
.forEach((dataRef) => {
replacements[dataRef] = dataRef.replace('early-access.', '').replace('-alt.', '.')
// replacements[dataRef] = dataRef.replace('early-access.', '')
})
replacements[dataRef] = dataRef.replace('early-access.', '').replace('-alt.', '.')
// replacements[dataRef] = dataRef.replace('early-access.', '')
}
imageRefs
// Since we're removing early-access from the path, filter for those that include it
.filter((imageRef) => imageRef.split('/').includes('early-access'))
// Since we're removing early-access from the path, filter for those that include it
const imageRefsToRemove = imageRefs.filter((ref) => ref.split('/').includes('early-access'))
for (const imageRef of imageRefsToRemove) {
// Add to the { oldRef: newRef } replacements object
.forEach((imageRef) => {
replacements[imageRef] = imageRef.replace('/assets/images/early-access/', '/assets/images/')
})
replacements[imageRef] = imageRef.replace('/assets/images/early-access/', '/assets/images/')
}
}
// Return early if nothing to replace
if (!Object.keys(replacements).length) {
return
continue
}
// Make the replacement in the content
let newContents = oldContents
Object.entries(replacements).forEach(([oldRef, newRef]) => {
for (const [oldRef, newRef] of Object.entries(replacements)) {
newContents = newContents.replace(new RegExp(escapeRegExp(oldRef), 'g'), newRef)
})
}
// Write the updated content
fs.writeFileSync(file, newContents)
})
}
console.log('Done! Run "git status" in your docs-early-access checkout to see the changes.\n')

View File

@@ -332,11 +332,11 @@ async function waitForPageReady() {
}
function initClipboardEvent() {
;['copy', 'cut', 'paste'].forEach((verb) => {
for (const verb of ['copy', 'cut', 'paste']) {
document.documentElement.addEventListener(verb, () => {
sendEvent({ type: EventType.clipboard, clipboard_operation: verb })
})
})
}
}
function initCopyButtonEvent() {

View File

@@ -203,12 +203,12 @@ describe('ifversion', () => {
return !matchesPerVersion[version].includes(condition)
})
wantedConditions.forEach((condition: string) => {
for (const condition of wantedConditions as string[]) {
expect(html).toMatch(condition)
})
unwantedConditions.forEach((condition: string) => {
}
for (const condition of unwantedConditions as string[]) {
expect(html).not.toMatch(condition)
})
}
},
)
})

View File

@@ -23,7 +23,7 @@ const pages: { [key: string]: string } = {
}
// create a test for each page, will eventually be separated into finer grain tests
Object.keys(pages).forEach((pageName) => {
for (const pageName of Object.keys(pages)) {
test.describe(`${pageName}`, () => {
test('full page axe scan without experiments', async ({ page }) => {
await page.goto(pages[pageName])
@@ -35,6 +35,7 @@ Object.keys(pages).forEach((pageName) => {
expect(accessibilityScanResults.violations).toEqual([])
})
})
test.describe(`${pageName} (with experiments)`, () => {
test('full page axe scan with experiments', async ({ page }) => {
await page.goto(pages[pageName])
@@ -46,4 +47,4 @@ Object.keys(pages).forEach((pageName) => {
expect(accessibilityScanResults.violations).toEqual([])
})
})
})
}

View File

@@ -49,9 +49,10 @@ export default function ClientSideHighlightJS() {
}
}
})
for (const parent of Array.from(
const codeElementParents = Array.from(
document.querySelectorAll<HTMLElement>(CODE_ELEMENTS_PARENT_SELECTOR),
)) {
)
for (const parent of codeElementParents) {
const language = parent.dataset.highlight || 'json'
if (!SUPPORTED_LANGUAGES.includes(language)) {
if (process.env.NODE_ENV === 'development') {

View File

@@ -55,11 +55,11 @@ export const UtmPreserver = () => {
const applyUtmToLinks = (): void => {
const links = document.querySelectorAll<HTMLAnchorElement>('a[href]')
links.forEach((link) => {
for (const link of links) {
if (link.href && shouldPreserveUtm(link.href)) {
link.href = addUtmParamsToUrl(link.href, utmParams)
}
})
}
}
// Handle click events for dynamic link modification
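One reason this conversion is safe: `querySelectorAll` returns a `NodeList`, which is iterable, so `for...of` works directly with no `Array.from`. A sketch with a placeholder body (the real loop rewrites UTM parameters):

```ts
const links = document.querySelectorAll<HTMLAnchorElement>('a[href]')
for (const link of links) {
  console.log(link.href) // placeholder for addUtmParamsToUrl(...)
}
```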

View File

@@ -5,7 +5,7 @@ export default function copyCode() {
if (!buttons) return
buttons.forEach((button) =>
for (const button of buttons) {
button.addEventListener('click', async () => {
const codeId = (button as HTMLElement).dataset.clipboard
if (!codeId) return
@@ -22,6 +22,6 @@ export default function copyCode() {
setTimeout(() => {
button.classList.remove('copied')
}, 2000)
}),
)
})
}
}

View File

@@ -51,7 +51,7 @@ export default function toggleAnnotation() {
function setActive(annotationButtons: Array<Element>, targetMode?: string) {
const activeElements: Array<Element> = []
targetMode = validateMode(targetMode)
annotationButtons.forEach((el) => {
for (const el of annotationButtons) {
if (el.getAttribute('value') === targetMode) {
el.ariaCurrent = 'true'
el.classList.add('selected')
@@ -60,7 +60,7 @@ function setActive(annotationButtons: Array<Element>, targetMode?: string) {
el.removeAttribute('aria-current')
el.classList.remove('selected')
}
})
}
if (!activeElements.length)
throw new Error('No annotationBtn item is active for code annotation.')
@@ -70,15 +70,15 @@ function setActive(annotationButtons: Array<Element>, targetMode?: string) {
// displays the chosen annotation mode
function displayAnnotationMode(annotationBtnItems: Array<Element>, targetMode?: string) {
if (!targetMode || targetMode === annotationMode.Beside)
annotationBtnItems.forEach((el) => {
if (!targetMode || targetMode === annotationMode.Beside) {
for (const el of annotationBtnItems) {
el.closest('.annotate')?.classList.replace('inline', 'beside')
})
else if (targetMode === annotationMode.Inline)
annotationBtnItems.forEach((el) => {
}
} else if (targetMode === annotationMode.Inline) {
for (const el of annotationBtnItems) {
el.closest('.annotate')?.classList.replace('beside', 'inline')
})
else throw new Error('Invalid target mode set for annotation.')
}
} else throw new Error('Invalid target mode set for annotation.')
setActive(annotationBtnItems, targetMode)
}

View File

@@ -11,7 +11,7 @@ export default function wrapCodeTerms() {
const codeTerms = document.querySelectorAll('#article-contents table code')
if (!codeTerms) return
codeTerms.forEach((node) => {
for (const node of codeTerms) {
// Do the wrapping on the inner text only. With anchor element children
// we'll only handle the case where the code term only has a single child
// and that child is an anchor element.
@@ -37,5 +37,5 @@ export default function wrapCodeTerms() {
} else {
node.innerHTML = node.innerHTML.replace(oldText, newText)
}
})
}
}

View File

@@ -169,7 +169,9 @@ async function getMtime(filePath: string): Promise<number> {
function assertUniqueChildren(page: any): void {
if (page.children.length !== new Set(page.children).size) {
const count: Record<string, number> = {}
page.children.forEach((entry: string) => (count[entry] = 1 + (count[entry] || 0)))
for (const entry of page.children) {
count[entry] = 1 + (count[entry] || 0)
}
let msg = `${page.relativePath} has duplicates in the 'children' key.`
for (const [entry, times] of Object.entries(count)) {
if (times > 1) msg += ` '${entry}' is repeated ${times} times. `

View File

@@ -170,14 +170,16 @@ class Page {
// where as notations like `__GHES_DEPRECATED__[3]`
// or `__GHES_SUPPORTED__[0]` are static.
if (opts.basePath.split(path.sep).includes('fixtures')) {
supported.forEach((version: string, i: number, arr: string[]) => {
for (let i = 0; i < supported.length; i++) {
const version: string = supported[i]
markdown = markdown.replaceAll(`__GHES_SUPPORTED__[${i}]`, version)
markdown = markdown.replaceAll(`__GHES_SUPPORTED__[-${arr.length - i}]`, version)
})
deprecated.forEach((version: string, i: number, arr: string[]) => {
markdown = markdown.replaceAll(`__GHES_SUPPORTED__[-${supported.length - i}]`, version)
}
for (let i = 0; i < deprecated.length; i++) {
const version: string = deprecated[i]
markdown = markdown.replaceAll(`__GHES_DEPRECATED__[${i}]`, version)
markdown = markdown.replaceAll(`__GHES_DEPRECATED__[-${arr.length - i}]`, version)
})
markdown = markdown.replaceAll(`__GHES_DEPRECATED__[-${deprecated.length - i}]`, version)
}
}
return {

View File

@@ -48,7 +48,9 @@ function getBreadcrumbs(req: ExtendedRequest, isEarlyAccess: boolean) {
req.context.currentPath,
req.context.currentProductTreeTitles,
)
;[...Array(cutoff)].forEach(() => breadcrumbsResult.shift())
for (let i = 0; i < cutoff; i++) {
breadcrumbsResult.shift()
}
return breadcrumbsResult
}
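The replaced idiom here, `[...Array(cutoff)].forEach(() => arr.shift())`, allocated a throwaway array just to run `cutoff` iterations; the counted loop states the intent directly. A generic sketch:

```ts
// Remove the first `cutoff` entries in place (sketch of the pattern above)
function dropLeading<T>(arr: T[], cutoff: number): void {
  for (let i = 0; i < cutoff; i++) {
    arr.shift()
  }
}
```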

View File

@@ -27,9 +27,9 @@ export default async function glossaries(req: ExtendedRequest, res: Response, ne
if (req.context.currentLanguage !== 'en') {
const enGlossariesRaw: Glossary[] = getDataByLanguage('glossaries.external', 'en')
enGlossariesRaw.forEach(({ term, description }) => {
for (const { term, description } of enGlossariesRaw) {
enGlossaryMap.set(term, description)
})
}
}
// The glossaries Yaml file contains descriptions that might contain

View File

@@ -172,9 +172,8 @@ MyApp.getInitialProps = async (appContext: AppContext) => {
// Note, `req` will be undefined if this is the client-side rendering
// of a 500 page ("Ooops! It looks like something went wrong.")
if (req?.context?.languages) {
for (const [langCode, langObj] of Object.entries(
req.context.languages as Record<string, LanguageItem>,
)) {
const languageEntries = Object.entries(req.context.languages as Record<string, LanguageItem>)
for (const [langCode, langObj] of languageEntries) {
// Only pick out the keys we actually need
languagesContext.languages[langCode] = {
name: langObj.name,

View File

@@ -20,7 +20,7 @@ describe('block robots', () => {
.filter((product) => product.wip)
.map((product) => product.id)
wipProductIds.forEach((id) => {
for (const id of wipProductIds) {
const { href } = productMap[id]
const blockedPaths = [
`/en${href}`,
@@ -30,10 +30,10 @@ describe('block robots', () => {
`/en/enterprise/${enterpriseServerReleases.oldestSupported}/user${href}`,
]
blockedPaths.forEach((path) => {
for (const path of blockedPaths) {
expect(allowIndex(path)).toBe(false)
})
})
}
}
})
test('disallows crawling of early access "hidden" products', async () => {
@@ -41,19 +41,19 @@ describe('block robots', () => {
.filter((product) => product.hidden)
.map((product) => product.id)
hiddenProductIds.forEach((id) => {
for (const id of hiddenProductIds) {
const { versions } = productMap[id]
if (!versions) return
if (!versions) continue
const blockedPaths = versions
.map((version) => {
return [`/en/${version}/${id}`, `/en/${version}/${id}/some-early-access-article`]
})
.flat()
blockedPaths.forEach((path) => {
for (const path of blockedPaths) {
expect(allowIndex(path)).toBe(false)
})
})
}
}
})
test('allows crawling of non-WIP products', async () => {
@@ -68,7 +68,7 @@ describe('block robots', () => {
})
test('disallows crawling of deprecated enterprise releases', async () => {
enterpriseServerReleases.deprecated.forEach((version) => {
for (const version of enterpriseServerReleases.deprecated) {
const blockedPaths = [
`/en/enterprise-server@${version}/actions`,
`/en/enterprise/${version}/actions`,
@@ -76,9 +76,9 @@ describe('block robots', () => {
`/en/enterprise/${version}/actions/overview`,
]
blockedPaths.forEach((path) => {
for (const path of blockedPaths) {
expect(allowIndex(path)).toBe(false)
})
})
}
}
})
})

View File

@@ -59,18 +59,18 @@ describe('pages module', () => {
const versionedRedirects: Array<{ path: string; file: string }> = []
// Page objects have dynamic properties from chain/lodash that aren't fully typed
englishPages.forEach((page: any) => {
page.redirect_from.forEach((redirect: string) => {
page.applicableVersions.forEach((version: string) => {
for (const page of englishPages) {
for (const redirect of (page as any).redirect_from) {
for (const version of (page as any).applicableVersions) {
const versioned = removeFPTFromPath(path.posix.join('/', version, redirect))
versionedRedirects.push({ path: versioned, file: page.fullPath })
versionedRedirects.push({ path: versioned, file: (page as any).fullPath })
if (!redirectToFiles.has(versioned)) {
redirectToFiles.set(versioned, new Set<string>())
}
redirectToFiles.get(versioned)!.add(page.fullPath)
})
})
})
redirectToFiles.get(versioned)!.add((page as any).fullPath)
}
}
}
// Only consider as duplicate if more than one unique file defines the same redirect
const duplicates = Array.from(redirectToFiles.entries())

View File

@@ -166,10 +166,10 @@ describe('server', () => {
const categories = JSON.parse(res.body)
expect(Array.isArray(categories)).toBe(true)
expect(categories.length).toBeGreaterThan(1)
categories.forEach((category: Category) => {
for (const category of categories as Category[]) {
expect('name' in category).toBe(true)
expect('published_articles' in category).toBe(true)
})
}
})
describeViaActionsOnly('Early Access articles', () => {

View File

@@ -61,7 +61,7 @@ describe('siteTree', () => {
function validate(currentPage: Tree): void {
const childPages: Tree[] = currentPage.childPages || []
childPages.forEach((childPage) => {
for (const childPage of childPages) {
// Store page reference before validation to avoid type narrowing
const pageRef: Tree = childPage
const isValid = siteTreeValidate(childPage)
@@ -76,5 +76,5 @@ function validate(currentPage: Tree): void {
// Run recursively until we run out of child pages
validate(pageRef)
})
}
}

View File

@@ -142,7 +142,7 @@ async function createRedirectsFile(pageList: PageList, outputDirectory: string)
const redirectEntries: Array<[string, string]> = Object.entries(redirects)
redirectEntries.forEach(([oldPath, newPath]) => {
for (let [oldPath, newPath] of redirectEntries) {
// remove any liquid variables that sneak in
oldPath = oldPath.replace('/{{ page.version }}', '').replace('/{{ currentVersion }}', '')
// ignore any old paths that are not in this version
@@ -152,10 +152,10 @@ async function createRedirectsFile(pageList: PageList, outputDirectory: string)
oldPath.includes(`/enterprise/${version}`)
)
)
return
continue
redirectsPerVersion[oldPath] = newPath
})
}
fs.writeFileSync(
path.join(outputDirectory, 'redirects.json'),

View File

@@ -102,5 +102,7 @@ function updateFeatureData() {
}
console.log('Feature files with all versions: ')
allFeatureFiles.forEach((file) => console.log(file))
for (const file of allFeatureFiles) {
console.log(file)
}
}

View File

@@ -54,7 +54,7 @@ async function main(): Promise<void> {
}
const formattedDates: EnterpriseDates = {}
Object.entries(rawDates).forEach(([releaseNumber, releaseObject]) => {
for (const [releaseNumber, releaseObject] of Object.entries(rawDates)) {
formattedDates[releaseNumber] = {
// For backward compatibility, keep releaseDate as RC date initially, then GA date once available
releaseDate: releaseObject.release_candidate || releaseObject.start,
@@ -62,7 +62,7 @@ async function main(): Promise<void> {
releaseCandidateDate: releaseObject.release_candidate,
generalAvailabilityDate: releaseObject.start,
}
})
}
const formattedDatesString = JSON.stringify(formattedDates, null, 2)

View File

@@ -378,10 +378,10 @@ function getDisplayPermissions(
): Array<Record<string, string>> {
const displayPermissions = permissionSets.map((permissionSet) => {
const displayPermissionSet: Record<string, string> = {}
Object.entries(permissionSet).forEach(([key, value]) => {
for (const [key, value] of Object.entries(permissionSet)) {
const { displayTitle } = getDisplayTitle(key, progActorResources, true)
displayPermissionSet[displayTitle] = value
})
}
return displayPermissionSet
})
@@ -592,9 +592,9 @@ async function getProgActorResourceContent({
if (Object.keys(fileContent).length !== 1) {
throw new Error(`Error: The file ${JSON.stringify(fileContent)} must only have one key.`)
}
Object.entries(fileContent).forEach(([key, value]) => {
for (const [key, value] of Object.entries(fileContent)) {
progActorResources[key] = value
})
}
}
return progActorResources
}

View File

@@ -19,9 +19,9 @@ const changelog = new Map<string, any>()
const graphqlSchema = new Map<string, any>()
const miniTocs = new Map<string, Map<string, Map<string, any[]>>>()
Object.keys(languages).forEach((language) => {
for (const language of Object.keys(languages)) {
miniTocs.set(language, new Map())
})
}
// Using any for return type as the GraphQL schema structure is complex and dynamically loaded from JSON
export function getGraphqlSchema(version: string, type: string): any {

View File

@@ -56,16 +56,16 @@ export const getServerSideProps: GetServerSideProps<Props> = async (context) =>
// <li>Field filename was added to object type <code>IssueTemplate</code></li>
//
// ...without the additional <p>.
schema.forEach((item) => {
for (const item of schema) {
for (const group of [item.schemaChanges, item.previewChanges, item.upcomingChanges]) {
group.forEach((change) => {
for (const change of group) {
change.changes = change.changes.map((html) => {
if (html.startsWith('<p>') && html.endsWith('</p>')) return html.slice(3, -4)
return html
})
})
}
}
})
}
return {
props: {

View File

@@ -97,14 +97,14 @@ export async function createChangelogEntry(
const changes = await diff(oldSchema, newSchema)
const changesToReport: Change[] = []
const ignoredChanges: Change[] = []
changes.forEach((change) => {
for (const change of changes) {
if (CHANGES_TO_REPORT.includes(change.type)) {
changesToReport.push(change)
} else {
// Track ignored changes for visibility
ignoredChanges.push(change)
}
})
}
// Log warnings for ignored change types to provide visibility
if (ignoredChanges.length > 0) {
@@ -112,10 +112,10 @@ export async function createChangelogEntry(
console.warn(
`⚠️ GraphQL changelog: Ignoring ${ignoredChanges.length} changes of ${ignoredTypes.length} type(s):`,
)
ignoredTypes.forEach((type) => {
for (const type of ignoredTypes) {
const count = ignoredChanges.filter((change) => change.type === type).length
console.warn(` - ${type} (${count} change${count > 1 ? 's' : ''})`)
})
}
console.warn(
' These change types are not in CHANGES_TO_REPORT and will not appear in the changelog.',
)
@@ -257,15 +257,15 @@ export function segmentPreviewChanges(
// Build a map of `{ path => previewTitle` }
// for easier lookup of change to preview
const pathToPreview: Record<string, string> = {}
previews.forEach(function (preview): void {
preview.toggled_on.forEach(function (path) {
for (const preview of previews) {
for (const path of preview.toggled_on) {
pathToPreview[path] = preview.title
})
})
}
}
const schemaChanges: Change[] = []
const changesByPreview: Record<string, PreviewChanges> = {}
changesToReport.forEach(function (change): void {
for (const change of changesToReport) {
// For each change, see if its path _or_ one of its ancestors
// is covered by a preview. If it is, mark this change as belonging to a preview
const pathParts = change.path?.split('.') || []
@@ -290,7 +290,7 @@ export function segmentPreviewChanges(
} else {
schemaChanges.push(change)
}
})
}
return { schemaChangesToReport: schemaChanges, previewChangesToReport: changesByPreview }
}

View File

@@ -21,7 +21,7 @@ export default function processPreviews(previews: RawPreview[]): ProcessedPrevie
// clean up raw yml data
// Using any type because we're mutating the preview object to add new properties
// that don't exist in the RawPreview interface (accept_header, href)
previews.forEach((preview: any) => {
for (const preview of previews as any[]) {
preview.title = sentenceCase(preview.title)
.replace(/ -.+/, '') // remove any extra info that follows a hyphen
.replace('it hub', 'itHub') // fix overcorrected `git hub` from sentenceCasing
@@ -46,7 +46,7 @@ export default function processPreviews(previews: RawPreview[]): ProcessedPrevie
slugger.reset()
preview.href = `/graphql/overview/schema-previews#${slugger.slug(preview.title)}`
})
}
return previews as ProcessedPreview[]
}

View File

@@ -23,16 +23,16 @@ describe('graphql json files', () => {
// so use a cache of which we've already validated to speed this
// test up significantly.
const typeObjsTested = new Set<string>()
graphqlVersions.forEach((version) => {
for (const version of graphqlVersions) {
const schemaJsonPerVersion = readJsonFile(
`${GRAPHQL_DATA_DIR}/${version}/schema.json`,
) as Record<string, Array<{ kind: string; name: string }>>
// all graphql types are arrays except for queries
graphqlTypes.forEach((type) => {
for (const type of graphqlTypes) {
test(`${version} schemas object validation for ${type}`, () => {
schemaJsonPerVersion[type].forEach((typeObj) => {
for (const typeObj of schemaJsonPerVersion[type]) {
const key = JSON.stringify(typeObj) + type
if (typeObjsTested.has(key)) return
if (typeObjsTested.has(key)) continue
typeObjsTested.add(key)
const { isValid, errors } = validateJson(
@@ -48,15 +48,15 @@ describe('graphql json files', () => {
}
expect(isValid, formattedErrors).toBe(true)
})
}
})
})
})
}
}
test('previews object validation', () => {
graphqlVersions.forEach((version) => {
for (const version of graphqlVersions) {
const previews = readJsonFile(`${GRAPHQL_DATA_DIR}/${version}/previews.json`) as Array<any> // GraphQL preview schema structure is dynamic
previews.forEach((preview) => {
for (const preview of previews) {
const isValid = previewsValidate(preview)
let errors: string | undefined
@@ -65,18 +65,18 @@ describe('graphql json files', () => {
}
expect(isValid, errors).toBe(true)
})
})
}
}
})
test('upcoming changes object validation', () => {
graphqlVersions.forEach((version) => {
for (const version of graphqlVersions) {
const upcomingChanges = readJsonFile(
`${GRAPHQL_DATA_DIR}/${version}/upcoming-changes.json`,
) as Record<string, Array<any>> // GraphQL change object structure is dynamic
for (const changes of Object.values(upcomingChanges)) {
// each object value is an array of changes
changes.forEach((changeObj) => {
for (const changeObj of changes) {
const isValid = upcomingChangesValidate(changeObj)
let errors: string | undefined
@@ -85,8 +85,8 @@ describe('graphql json files', () => {
}
expect(isValid, errors).toBe(true)
})
}
}
})
}
})
})

View File

@@ -133,7 +133,7 @@ function RestNavListItem({ category }: { category: ProductTreeNode }) {
if (nonAutomatedRestPaths.every((item: string) => !asPath.includes(item))) {
const observer = new IntersectionObserver(
(entries) => {
entries.forEach((entry) => {
for (const entry of entries) {
if (entry.target.id) {
const anchor = `#${entry.target.id.split('--')[0]}`
if (entry.isIntersecting === true) setVisibleAnchor(anchor)
@@ -142,7 +142,7 @@ function RestNavListItem({ category }: { category: ProductTreeNode }) {
} else {
setVisibleAnchor('')
}
})
}
},
{ rootMargin: '0px 0px -85% 0px' },
)
@@ -150,9 +150,9 @@ function RestNavListItem({ category }: { category: ProductTreeNode }) {
// we can remove the h2 here
const headingsList = Array.from(document.querySelectorAll('h2, h3'))
headingsList.forEach((heading) => {
for (const heading of headingsList) {
observer.observe(heading)
})
}
return () => {
observer.disconnect()

View File

@@ -17,18 +17,18 @@ describe('octicons reference', () => {
})
test('all octicons are strings', () => {
VALID_OCTICONS.forEach((octicon) => {
for (const octicon of VALID_OCTICONS) {
expect(typeof octicon).toBe('string')
})
}
})
})
describe('OCTICON_COMPONENTS', () => {
test('has components for all valid octicons', () => {
VALID_OCTICONS.forEach((octicon) => {
for (const octicon of VALID_OCTICONS) {
expect(OCTICON_COMPONENTS[octicon]).toBeDefined()
expect(typeof OCTICON_COMPONENTS[octicon]).toBe('object')
})
}
})
test('maps specific octicons to correct components', () => {
@@ -90,9 +90,9 @@ describe('octicons reference', () => {
// Test a few key octicons to verify the type works correctly
const testOcticons: ValidOcticon[] = ['bug', 'rocket', 'copilot']
testOcticons.forEach((octicon) => {
for (const octicon of testOcticons) {
expect(VALID_OCTICONS.includes(octicon)).toBe(true)
})
}
})
})
@@ -101,9 +101,9 @@ describe('octicons reference', () => {
const componentKeys = Object.keys(OCTICON_COMPONENTS)
const validOcticonsSet = new Set(VALID_OCTICONS)
componentKeys.forEach((key) => {
for (const key of componentKeys) {
expect(validOcticonsSet.has(key as ValidOcticon)).toBe(true)
})
}
expect(componentKeys).toHaveLength(VALID_OCTICONS.length)
})

View File

@@ -369,12 +369,14 @@ export function correctTranslatedContentStrings(
}
if (content.includes('{{%')) {
content.split('\n').forEach((line, i) => {
const lines = content.split('\n')
for (let i = 0; i < lines.length; i++) {
const line = lines[i]
if (line.includes('{{%') && !line.includes('{{{% endraw')) {
console.log(context.code, 'context.relativePath', context.relativePath)
console.log(i, line)
}
})
}
}
return content

View File

@@ -56,9 +56,10 @@ export default function getEnglishHeadings(
// return a map from translation:English
const headingMap: Record<string, string> = {}
translatedHeadings.forEach((k: string, i: number) => {
for (let i = 0; i < translatedHeadings.length; i++) {
const k = translatedHeadings[i]
headingMap[k] = englishHeadings[i]
})
}
return headingMap
}

View File

@@ -60,26 +60,26 @@ const languages: Languages = { ...allLanguagesWithDirs }
if (TRANSLATIONS_FIXTURE_ROOT) {
// Keep all languages that have a directory in the fixture root.
Object.entries(languages).forEach(([code, { dir }]) => {
for (const [code, { dir }] of Object.entries(languages)) {
if (code !== 'en' && !fs.existsSync(dir)) {
delete languages[code]
}
})
}
} else if (process.env.ENABLED_LANGUAGES) {
if (process.env.ENABLED_LANGUAGES.toLowerCase() !== 'all') {
Object.keys(languages).forEach((code) => {
for (const code of Object.keys(languages)) {
if (!process.env.ENABLED_LANGUAGES!.includes(code)) {
delete languages[code]
}
})
}
// This would make the translation health report invalid JSON
// console.log(`ENABLED_LANGUAGES: ${process.env.ENABLED_LANGUAGES}`)
}
} else if (process.env.NODE_ENV === 'test') {
// Unless explicitly set, when running tests default to just English
Object.keys(languages).forEach((code) => {
for (const code of Object.keys(languages)) {
if (code !== 'en') delete languages[code]
})
}
}
export const languageKeys: string[] = Object.keys(languages)
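Deleting keys while iterating is safe in the loops above because Object.keys() returns a snapshot array rather than a live view; a small sketch with made-up data:

const langs: Record<string, unknown> = { en: {}, ja: {}, es: {} }
for (const code of Object.keys(langs)) {
  if (code !== 'en') delete langs[code] // does not disturb the iteration
}
console.log(Object.keys(langs)) // ['en']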

View File

@@ -134,24 +134,28 @@ function run(languageCode: string, site: Site, englishReusables: Reusables) {
const sumTotal = flat.reduce((acc, [, count]) => acc + count, 0)
console.log('\nMost common errors')
flat.forEach(([error, count], i) => {
for (let i = 0; i < flat.length; i++) {
const [error, count] = flat[i]
console.log(`${i + 1}.`.padEnd(3), error.padEnd(PADDING), count)
})
}
console.log(`${'TOTAL:'.padEnd(3 + 1 + PADDING)}`, sumTotal)
if (sumTotal) {
const whereFlat = Array.from(wheres.entries()).sort((a, b) => b[1] - a[1])
console.log('\nMost common places')
whereFlat.forEach(([error, count], i) => {
for (let i = 0; i < whereFlat.length; i++) {
const [error, count] = whereFlat[i]
console.log(`${i + 1}.`.padEnd(3), error.padEnd(PADDING), count)
})
}
const illegalTagsFlat = Array.from(illegalTags.entries()).sort((a, b) => b[1] - a[1])
if (illegalTagsFlat.reduce((acc, [, count]) => acc + count, 0)) {
console.log('\nMost common illegal tags', illegalTagsFlat.length > 10 ? ' (Top 10)' : '')
illegalTagsFlat.slice(0, 10).forEach(([error, count], i) => {
const topIllegalTags = illegalTagsFlat.slice(0, 10)
for (let i = 0; i < topIllegalTags.length; i++) {
const [error, count] = topIllegalTags[i]
console.log(`${i + 1}.`.padEnd(3), error.padEnd(PADDING), count)
})
}
}
}
console.log('\n')
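Where the index is needed, as in the ranked output above, Array.prototype.entries() is an alternative to a manual counter; a sketch with sample data (PADDING stands in for the real constant):

const PADDING = 30
const flat: Array<[string, number]> = [
  ['liquid syntax error', 12],
  ['broken link', 5],
]
for (const [i, [error, count]] of flat.entries()) {
  console.log(`${i + 1}.`.padEnd(3), error.padEnd(PADDING), count)
}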

View File

@@ -122,18 +122,18 @@ async function main(opts: MainOptions, args: string[]) {
const helpIndices = getIndicesOf('GitHub.help_url', contents)
helpIndices.push(...getIndicesOf('GitHub.developer_help_url', contents))
if (docsIndices.length > 0) {
docsIndices.forEach((numIndex) => {
for (const numIndex of docsIndices) {
// Assuming we don't have links close to 500 characters long
const docsLink = contents.substring(numIndex, numIndex + 500).match(urlRegEx)
if (!docsLink) return
if (!docsLink) continue
const linkURL = new URL(docsLink[0].toString().replace(/[^a-zA-Z0-9]*$|\\n$/g, ''))
const linkPath = linkURL.pathname + linkURL.hash
docsLinksFiles.push({ linkPath, file })
})
}
}
if (helpIndices.length > 0) {
helpIndices.forEach((numIndex) => {
for (const numIndex of helpIndices) {
// There are certain links like #{GitHub.help_url}#{learn_more_path} and #{GitHub.developer_help_url}#{learn_more_path} that we should skip
if (
(contents.substring(numIndex, numIndex + 11) === 'GitHub.help' &&
@@ -170,7 +170,7 @@ async function main(opts: MainOptions, args: string[]) {
docsLinksFiles.push({ linkPath, file })
}
})
}
}
}
}

View File

@@ -18,11 +18,11 @@ const STATIC_PREFIXES = {
public: path.resolve(path.join('src', 'graphql', 'data')),
}
// Sanity check that these are valid paths
Object.entries(STATIC_PREFIXES).forEach(([key, value]) => {
for (const [key, value] of Object.entries(STATIC_PREFIXES)) {
if (!fs.existsSync(value)) {
throw new Error(`Can't find static prefix (${key}): ${value}`)
}
})
}
program
.description('Analyze all checked content files, render them, and check for flaws.')

View File

@@ -87,11 +87,11 @@ const STATIC_PREFIXES: Record<string, string> = {
public: path.resolve(path.join('src', 'graphql', 'data')),
}
// Sanity check that these are valid paths
Object.entries(STATIC_PREFIXES).forEach(([key, value]) => {
for (const [key, value] of Object.entries(STATIC_PREFIXES)) {
if (!fs.existsSync(value)) {
throw new Error(`Can't find static prefix (${key}): ${value}`)
}
})
}
// By default, we don't cache external link checks to disk.
// By setting this env var to something >0, it enables the disk-based

View File

@@ -228,7 +228,8 @@ function printObjectDifference(
const combinedKey = `${parentKey}.${key}`
if (Array.isArray(value) && !equalArray(value, objTo[key])) {
const printedKeys = new Set()
value.forEach((entry, i) => {
for (let i = 0; i < value.length; i++) {
const entry = value[i]
// If it was an array of objects, we need to go deeper!
if (isObject(entry)) {
printObjectDifference(entry, objTo[key][i], rawContent, combinedKey)
@@ -243,10 +244,9 @@ function printObjectDifference(
const needle = new RegExp(`- ${entry}\\b`)
const index = rawContent.split(/\n/g).findIndex((line) => needle.test(line))
console.log(' ', chalk.dim(`line ${(index && index + 1) || 'unknown'}`))
console.log('')
}
}
})
}
} else if (typeof value === 'object' && value !== null) {
printObjectDifference(value, objTo[key], rawContent, combinedKey)
}

View File

@@ -111,9 +111,10 @@ export function createLogger(filePath: string) {
finalMessage = `${finalMessage}: ${errorObjects[0].message}`
} else {
// Multiple errors - use indexed keys and append all error messages
errorObjects.forEach((error, index) => {
for (let index = 0; index < errorObjects.length; index++) {
const error = errorObjects[index]
includeContext[`error_${index + 1}`] = error
})
}
const errorMessages = errorObjects.map((err) => err.message).join(', ')
finalMessage = `${finalMessage}: ${errorMessages}`
}

View File

@@ -61,7 +61,7 @@ export function toLogfmt(jsonString: Record<string, any>): string {
result: Record<string, any> = {},
seen: WeakSet<object> = new WeakSet(),
): Record<string, any> => {
Object.keys(obj).forEach((key) => {
for (const key of Object.keys(obj)) {
const newKey = parentKey ? `${parentKey}.${key}` : key
const value = obj[key]
@@ -69,19 +69,19 @@ export function toLogfmt(jsonString: Record<string, any>): string {
// Handle circular references
if (seen.has(value)) {
result[newKey] = '[Circular]'
return
continue
}
// Handle Date objects specially
if (value instanceof Date) {
result[newKey] = value.toISOString()
return
continue
}
// Handle arrays
if (Array.isArray(value)) {
result[newKey] = value.join(',')
return
continue
}
// Handle other objects - only flatten if not empty
@@ -96,7 +96,7 @@ export function toLogfmt(jsonString: Record<string, any>): string {
result[newKey] =
value === undefined || (typeof value === 'string' && value === '') ? null : value
}
})
}
return result
}
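A compressed, self-contained sketch of the flattening rules the comments above describe (dotted keys, Dates to ISO strings, arrays joined with commas, circular references marked); an approximation for illustration, not the exported helper:

function flatten(
  obj: Record<string, any>,
  parentKey = '',
  result: Record<string, any> = {},
  seen: WeakSet<object> = new WeakSet(),
): Record<string, any> {
  seen.add(obj)
  for (const key of Object.keys(obj)) {
    const newKey = parentKey ? `${parentKey}.${key}` : key
    const value = obj[key]
    if (value !== null && typeof value === 'object' && seen.has(value)) {
      result[newKey] = '[Circular]' // already visited on this path
      continue
    }
    if (value instanceof Date) {
      result[newKey] = value.toISOString()
      continue
    }
    if (Array.isArray(value)) {
      result[newKey] = value.join(',')
      continue
    }
    if (value !== null && typeof value === 'object') {
      flatten(value, newKey, result, seen)
      continue
    }
    result[newKey] = value
  }
  return result
}

// flatten({ a: { b: 1 }, when: new Date(0), tags: ['x', 'y'] })
// => { 'a.b': 1, when: '1970-01-01T00:00:00.000Z', tags: 'x,y' }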

View File

@@ -125,11 +125,11 @@ export async function getLocalizedGroupNames(lang: string): Promise<{ [key: stri
export function createOcticonToNameMap(childGroups: ProductGroupData[]): { [key: string]: string } {
const octiconToName: { [key: string]: string } = {}
childGroups.forEach((group: ProductGroupData) => {
for (const group of childGroups) {
if (group.octicon && group.name) {
octiconToName[group.octicon] = group.name
}
})
}
return octiconToName
}
@@ -140,11 +140,11 @@ export function mapEnglishToLocalizedNames(
): { [key: string]: string } {
const nameMap: { [key: string]: string } = {}
englishGroups.forEach((englishGroup: ProductGroupData) => {
for (const englishGroup of englishGroups) {
if (englishGroup.octicon && localizedByOcticon[englishGroup.octicon]) {
nameMap[englishGroup.name] = localizedByOcticon[englishGroup.octicon]
}
})
}
return nameMap
}

View File

@@ -5,8 +5,8 @@ const productNames: ProductNames = {
dotcom: 'GitHub.com',
}
enterpriseServerReleases.all.forEach((version) => {
for (const version of enterpriseServerReleases.all) {
productNames[version] = `Enterprise Server ${version}`
})
}
export default productNames

View File

@@ -14,7 +14,7 @@ describe('products module', () => {
})
test('every product is valid', () => {
Object.values(productMap).forEach((product) => {
for (const product of Object.values(productMap)) {
const isValid = validate(product)
let errors: string | undefined
@@ -22,6 +22,6 @@ describe('products module', () => {
errors = formatAjvErrors(validate.errors)
}
expect(isValid, errors).toBe(true)
})
}
})
})

View File

@@ -25,7 +25,7 @@ export default function permalinkRedirects(
// For every "old" path in a content file's redirect_from frontmatter, also add that path to
// the redirects object as a key, where the value is the content file's permalink.
redirectFrom.forEach((frontmatterOldPath) => {
for (let frontmatterOldPath of redirectFrom) {
if (!frontmatterOldPath.startsWith('/')) {
throw new Error(
`'${frontmatterOldPath}' is not a valid redirect_from frontmatter value because it doesn't start with a /`,
@@ -40,7 +40,8 @@ export default function permalinkRedirects(
.replace('/admin/guides/', '/admin/')
.replace(/^\/enterprise\/admin\//, '/admin/')
permalinks.forEach((permalink, index) => {
for (let index = 0; index < permalinks.length; index++) {
const permalink = permalinks[index]
// For the first supported permalink (the order is determined by lib/all-versions),
// put an entry into `redirects` without any version prefix.
if (index === 0) {
@@ -49,8 +50,8 @@ export default function permalinkRedirects(
// For every permalink, put an entry into `redirects` with the version prefix.
redirects[`/${permalink.pageVersion}${frontmatterOldPath}`] = permalink.hrefWithoutLanguage
})
})
}
}
return redirects
}

View File

@@ -22,9 +22,9 @@ export async function precompileRedirects(pageList: Page[]): Promise<Redirects>
// CURRENT PAGES PERMALINKS AND FRONTMATTER
// create backwards-compatible old paths for page permalinks and frontmatter redirects
pageList
.filter((page) => page.languageCode === 'en')
.forEach((page) => Object.assign(allRedirects, page.buildRedirects()))
for (const page of pageList.filter((xpage) => xpage.languageCode === 'en')) {
Object.assign(allRedirects, page.buildRedirects())
}
// NOTE: Exception redirects **MUST COME AFTER** pageList redirects above in order
// to properly override them. Exception redirects are unicorn one-offs that are not
@@ -46,7 +46,7 @@ export async function precompileRedirects(pageList: Page[]): Promise<Redirects>
const exceptions = getExceptionRedirects(EXCEPTIONS_FILE) as Redirects
Object.assign(allRedirects, exceptions)
Object.entries(allRedirects).forEach(([fromURI, toURI]) => {
for (const [fromURI, toURI] of Object.entries(allRedirects)) {
// If the destination URL has a hardcoded `enterprise-server@latest` in
// it we need to rewrite that now.
// We never want to redirect to that as the final URL (in the 301 response)
@@ -60,7 +60,7 @@ export async function precompileRedirects(pageList: Page[]): Promise<Redirects>
`/enterprise-server@${latest}`,
)
}
})
}
return allRedirects
}
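The change above shows how filter/forEach chains were split: the filter stays an expression and only the forEach becomes a loop. A tiny sketch with hypothetical shapes:

interface PageLike {
  languageCode: string
  buildRedirects(): Record<string, string>
}
const all: Record<string, string> = {}
const pages: PageLike[] = []
for (const page of pages.filter((p) => p.languageCode === 'en')) {
  Object.assign(all, page.buildRedirects())
}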

View File

@@ -58,17 +58,17 @@ describe('versioned redirects', () => {
expect(redirectKeys.length).toBeGreaterThan(0)
// Verify all old paths are properly formatted
redirectKeys.forEach((oldPath) => {
for (const oldPath of redirectKeys) {
expect(oldPath).toMatch(/^\/[a-z0-9-/]+$/)
expect(oldPath).not.toMatch(/^\/en\//)
})
}
// Verify all new paths have proper versioning
Object.values(versionlessRedirects).forEach((newPath) => {
for (const newPath of Object.values(versionlessRedirects)) {
expect(newPath).toMatch(
/^\/(enterprise-cloud@latest|enterprise-server@latest|admin|github|articles|billing|code-security|actions|packages|copilot|rest|webhooks|developers)/,
)
})
}
})
test('enterprise-server@latest paths are properly transformed', () => {
@@ -76,7 +76,7 @@ describe('versioned redirects', () => {
newPath.includes('/enterprise-server@latest'),
)
enterpriseServerPaths.forEach(([, newPath]) => {
for (const [, newPath] of enterpriseServerPaths) {
const transformedPath = `/en${newPath.replace(
'/enterprise-server@latest',
`/enterprise-server@${latest}`,
@@ -85,6 +85,6 @@ describe('versioned redirects', () => {
expect(transformedPath).toContain(`/enterprise-server@${latest}`)
expect(transformedPath).not.toContain('/enterprise-server@latest')
expect(transformedPath).toMatch(/^\/en\//)
})
}
})
})

View File

@@ -88,11 +88,11 @@ export default async function ghesReleaseNotesContext(
req.context.latestRelease = latestStable
// Add convenience props for "Supported releases" section on GHES Admin landing page (NOT release notes).
req.context.ghesReleases.forEach((release) => {
for (const release of req.context.ghesReleases) {
release.firstPreviousRelease = all[all.findIndex((v) => v === release.version) + 1]
release.secondPreviousRelease =
all[all.findIndex((v) => v === release.firstPreviousRelease) + 1]
})
}
return next()
}

View File

@@ -42,7 +42,7 @@ describe('lint enterprise release notes', () => {
for (const key in sections) {
const section = sections[key]
const label = `sections.${key}`
section.forEach((part) => {
for (const part of section) {
if (Array.isArray(part)) {
toLint = { ...toLint, ...{ [label]: section.join('\n') } }
} else {
@@ -53,7 +53,7 @@ describe('lint enterprise release notes', () => {
}
}
}
})
}
}
// Create context with site data for rendering liquid variables

View File

@@ -25,14 +25,14 @@ export const RestReferencePage = ({ restOperations }: StructuredContentT) => {
useEffect(() => {
const codeBlocks = document.querySelectorAll<HTMLPreElement>('pre')
codeBlocks.forEach((codeBlock) => {
for (const codeBlock of codeBlocks) {
if (
codeBlock.scrollWidth > codeBlock.clientWidth ||
codeBlock.scrollHeight > codeBlock.clientHeight
) {
codeBlock.setAttribute('tabindex', '0')
}
})
}
}, [])
return (
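The direct for...of over querySelectorAll works because NodeListOf<T> is iterable (assuming the dom.iterable TS lib, which Next.js projects typically include); no Array.from is needed:

for (const pre of document.querySelectorAll<HTMLPreElement>('pre')) {
  const overflows = pre.scrollWidth > pre.clientWidth || pre.scrollHeight > pre.clientHeight
  // tabindex makes an overflowing (scrollable) block reachable by keyboard
  if (overflows) pre.setAttribute('tabindex', '0')
}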

View File

@@ -101,10 +101,10 @@ export function getShellExample(
const { bodyParameters } = codeSample.request
if (bodyParameters && typeof bodyParameters === 'object' && !Array.isArray(bodyParameters)) {
const paramNames = Object.keys(bodyParameters)
paramNames.forEach((elem) => {
for (const elem of paramNames) {
const escapedValue = escapeShellValue(String(bodyParameters[elem]))
requestBodyParams = `${requestBodyParams} ${CURL_CONTENT_TYPE_MAPPING[contentType]} '${elem}=${escapedValue}'`
})
}
} else {
const escapedValue = escapeShellValue(String(bodyParameters))
requestBodyParams = `${CURL_CONTENT_TYPE_MAPPING[contentType]} "${escapedValue}"`

View File

@@ -59,22 +59,22 @@ const restOperationData = new Map<
>()
const restOperations = new Map<string, Map<string, RestOperationData>>()
Object.keys(languages).forEach((language: string) => {
for (const language of Object.keys(languages)) {
restOperationData.set(language, new Map())
Object.keys(allVersions).forEach((version: string) => {
for (const version of Object.keys(allVersions)) {
// seeding each version with an empty map up front will allow us to
// perform checks more easily later on
restOperationData.get(language)!.set(version, new Map())
if (allVersions[version].apiVersions && allVersions[version].apiVersions.length > 0) {
allVersions[version].apiVersions.forEach((date: string) => {
for (const date of allVersions[version].apiVersions) {
restOperationData.get(language)!.get(version)!.set(date, new Map())
})
}
} else {
// Products that have not been calendar date versioned
restOperationData.get(language)!.get(version)!.set(NOT_API_VERSIONED, new Map())
}
})
})
}
}
export const categoriesWithoutSubcategories: string[] = fs
.readdirSync(REST_CONTENT_DIR)

View File

@@ -144,7 +144,7 @@ export const getServerSideProps: GetServerSideProps<Props> = async (context) =>
req.context,
)) as MinitocItemsT
miniTocItems.restOperationsMiniTocItems.forEach((operationMinitoc) => {
for (const operationMinitoc of miniTocItems.restOperationsMiniTocItems) {
const { title, href: miniTocAnchor } = operationMinitoc.contents
const fullPath = `/${context.locale}${versionPathSegment}rest/${context.params?.category}/${subCat}${miniTocAnchor}`
@@ -152,7 +152,7 @@ export const getServerSideProps: GetServerSideProps<Props> = async (context) =>
fullPath,
title,
})
})
}
// TocLanding expects a collection of objects that looks like this:
//

View File

@@ -37,12 +37,12 @@ export async function getDiffOpenAPIContentRest(): Promise<ErrorMessages> {
for (const schemaName in differences) {
errorMessages[schemaName] = {}
differences[schemaName].forEach((category) => {
for (const category of differences[schemaName]) {
errorMessages[schemaName][category] = {
contentDir: checkContentDir[schemaName][category],
openAPI: openAPISchemaCheck[schemaName][category],
}
})
}
}
}
@@ -57,23 +57,23 @@ async function createOpenAPISchemasCheck(): Promise<CheckObject> {
// Allow the most recent deprecation to exist on disk until fully deprecated
.filter((dir) => !dir.includes(deprecated[0]))
restDirectory.forEach((dir) => {
for (const dir of restDirectory) {
const filename = path.join(REST_DATA_DIR, dir, REST_SCHEMA_FILENAME)
const fileSchema = JSON.parse(fs.readFileSync(filename, 'utf8'))
const categories = Object.keys(fileSchema).sort()
const version = getDocsVersion(dir)
categories.forEach((category) => {
for (const category of categories) {
const subcategories = Object.keys(fileSchema[category]) as string[]
if (isApiVersioned(version)) {
getOnlyApiVersions(version).forEach(
(apiVersion) => (openAPICheck[apiVersion][category] = subcategories.sort()),
)
for (const apiVersion of getOnlyApiVersions(version)) {
openAPICheck[apiVersion][category] = subcategories.sort()
}
} else {
openAPICheck[version][category] = subcategories.sort()
}
})
})
}
}
return openAPICheck
}
@@ -97,14 +97,14 @@ async function createCheckContentDirectory(contentFiles: string[]): Promise<Chec
: version
})
allCompleteVersions.forEach((version) => {
for (const version of allCompleteVersions) {
if (!checkContent[version][category]) {
checkContent[version][category] = [subCategory]
} else {
checkContent[version][category].push(subCategory)
}
checkContent[version][category].sort()
})
}
}
return checkContent
@@ -122,13 +122,15 @@ function getOnlyApiVersions(version: string): string[] {
function createCheckObj(): CheckObject {
const versions: CheckObject = {}
Object.keys(allVersions).forEach((version) => {
for (const version of Object.keys(allVersions)) {
if (isApiVersioned(version)) {
getOnlyApiVersions(version).forEach((apiVersion) => (versions[apiVersion] = {}))
for (const apiVersion of getOnlyApiVersions(version)) {
versions[apiVersion] = {}
}
} else {
versions[`${allVersions[version].version}`] = {}
}
})
}
return versions
}

View File

@@ -62,9 +62,9 @@ export default async function getCodeSamples(operation: Operation): Promise<Merg
// has the same description, add a number to the example
if (mergedExamples.length > 1) {
const count: Record<string, number> = {}
mergedExamples.forEach((item) => {
for (const item of mergedExamples) {
count[item.request.description] = (count[item.request.description] || 0) + 1
})
}
const newMergedExamples = mergedExamples.map((example, i) => ({
...example,
@@ -204,7 +204,7 @@ export function getRequestExamples(operation: Operation): RequestExample[] {
// Requests can have multiple content types each with their own set of
// examples.
Object.keys(operation.requestBody.content).forEach((contentType) => {
for (const contentType of Object.keys(operation.requestBody.content)) {
let examples: Record<string, any> = {}
// This is a fallback to allow using the `example` property in
// the schema. If we start to enforce using examples vs. example using
@@ -230,13 +230,13 @@ export function getRequestExamples(operation: Operation): RequestExample[] {
parameters: parameterExamples.default,
},
})
return
continue
}
// There can be more than one example for a given content type. We need to
// iterate over the keys of the examples to create individual
// example objects
Object.keys(examples).forEach((key) => {
for (const key of Object.keys(examples)) {
// A content type that includes `+json` is a custom media type
// The default accept header is application/vnd.github.v3+json
// Which would have a content type of `application/json`
@@ -255,8 +255,8 @@ export function getRequestExamples(operation: Operation): RequestExample[] {
},
}
requestExamples.push(example)
})
})
}
}
return requestExamples
}
@@ -279,10 +279,10 @@ export function getRequestExamples(operation: Operation): RequestExample[] {
*/
export function getResponseExamples(operation: Operation): ResponseExample[] {
const responseExamples: ResponseExample[] = []
Object.keys(operation.responses).forEach((statusCode) => {
for (const statusCode of Object.keys(operation.responses)) {
// We don't want to create examples for error codes
// Error codes are displayed in the status table in the docs
if (parseInt(statusCode, 10) >= 400) return
if (parseInt(statusCode, 10) >= 400) continue
const content = operation.responses[statusCode].content
@@ -298,12 +298,12 @@ export function getResponseExamples(operation: Operation): ResponseExample[] {
},
}
responseExamples.push(example)
return
continue
}
// Responses can have multiple content types each with their own set of
// examples.
Object.keys(content).forEach((contentType) => {
for (const contentType of Object.keys(content)) {
let examples: Record<string, any> = {}
// This is a fallback to allow using the `example` property in
// the schema. If we start to enforce using examples vs. example using
@@ -333,18 +333,18 @@ export function getResponseExamples(operation: Operation): ResponseExample[] {
},
}
responseExamples.push(example)
return
continue
} else {
// Example for this content type doesn't exist.
// We could also check if there is a fully populated example
// directly in the response schema examples properties.
return
continue
}
// There can be more than one example for a given content type. We need to
// iterate over the keys of the examples to create individual
// example objects
Object.keys(examples).forEach((key) => {
for (const key of Object.keys(examples)) {
const example = {
key,
response: {
@@ -360,9 +360,9 @@ export function getResponseExamples(operation: Operation): ResponseExample[] {
},
}
responseExamples.push(example)
})
})
})
}
}
}
return responseExamples
}
@@ -383,7 +383,7 @@ export function getParameterExamples(operation: Operation): Record<string, Recor
}
const parameters = operation.parameters.filter((param: any) => param.in === 'path')
const parameterExamples: Record<string, Record<string, any>> = {}
parameters.forEach((parameter: any) => {
for (const parameter of parameters) {
const examples = parameter.examples
// If there are no examples, create an example from the uppercase parameter
// name, so that it is more visible that the value is fake data
@@ -392,11 +392,11 @@ export function getParameterExamples(operation: Operation): Record<string, Recor
if (!parameterExamples.default) parameterExamples.default = {}
parameterExamples.default[parameter.name] = parameter.name.toUpperCase()
} else {
Object.keys(examples).forEach((key) => {
for (const key of Object.keys(examples)) {
if (!parameterExamples[key]) parameterExamples[key] = {}
parameterExamples[key][parameter.name] = examples[key].value
})
}
}
})
}
return parameterExamples
}
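Several loops above read both the key and the value (examples[key].value, bodyParameters[elem]); Object.entries() would avoid the second lookup, at the cost of a slightly different shape. A sketch with invented data:

const examples: Record<string, { value: unknown }> = {
  default: { value: 'OCTOCAT' },
}
for (const [key, example] of Object.entries(examples)) {
  console.log(key, example.value) // no examples[key] lookup needed
}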

View File

@@ -69,7 +69,7 @@ export async function getSchemas(
export async function validateVersionsOptions(versions: string[]): Promise<void> {
const schemas = await getSchemas()
// Validate individual versions provided
versions.forEach((version) => {
for (const version of versions) {
if (
schemas.deprecated.includes(`${version}.deref.json`) ||
schemas.unpublished.includes(`${version}.deref.json`)
@@ -79,5 +79,5 @@ export async function validateVersionsOptions(versions: string[]): Promise<void>
} else if (!schemas.currentReleases.includes(`${version}.deref.json`)) {
throw new Error(`🛑 The version (${version}) you specified is not valid.`)
}
})
}
}

View File

@@ -35,7 +35,7 @@ async function getClientSideRedirects(): Promise<RedirectMap> {
)
const operationRedirects: RedirectMap = {}
Object.values(operationUrls).forEach((value: OperationUrl) => {
for (const value of Object.values(operationUrls)) {
const oldUrl = value.originalUrl.replace('/rest/reference', '/rest')
const anchor = oldUrl.split('#')[1]
const subcategory = value.subcategory
@@ -43,7 +43,7 @@ async function getClientSideRedirects(): Promise<RedirectMap> {
? `/rest/${value.category}/${subcategory}#${anchor}`
: `/rest/${value.category}#${anchor}`
operationRedirects[oldUrl] = redirectTo
})
}
const redirects: RedirectMap = {
...operationRedirects,
...sectionUrls,

View File

@@ -45,9 +45,9 @@ export default class Operation {
if (serverVariables) {
// Template variables structure comes from OpenAPI server variables
const templateVariables: Record<string, any> = {}
Object.keys(serverVariables).forEach(
(key) => (templateVariables[key] = serverVariables[key].default),
)
for (const key of Object.keys(serverVariables)) {
templateVariables[key] = serverVariables[key].default
}
this.serverUrl = parseTemplate(this.serverUrl).expand(templateVariables)
}

View File

@@ -80,7 +80,7 @@ async function formatRestData(operations: Operation[]): Promise<OperationsByCate
const categories = [...new Set(operations.map((operation) => operation.category))].sort()
const operationsByCategory: OperationsByCategory = {}
categories.forEach((category) => {
for (const category of categories) {
operationsByCategory[category] = {}
const categoryOperations = operations.filter((operation) => operation.category === category)
@@ -95,7 +95,7 @@ async function formatRestData(operations: Operation[]): Promise<OperationsByCate
subcategories.unshift(firstItem)
}
subcategories.forEach((subcategory) => {
for (const subcategory of subcategories) {
operationsByCategory[category][subcategory] = []
const subcategoryOperations = categoryOperations.filter(
@@ -103,8 +103,8 @@ async function formatRestData(operations: Operation[]): Promise<OperationsByCate
)
operationsByCategory[category][subcategory] = subcategoryOperations
})
})
}
}
return operationsByCategory
}

View File

@@ -98,10 +98,10 @@ async function getDataFrontmatter(
for (const file of fileList) {
const data = JSON.parse(await readFile(file, 'utf-8'))
const docsVersionName = getDocsVersion(path.basename(path.dirname(file)))
Object.keys(data).forEach((category) => {
for (const category of Object.keys(data)) {
// Used to automatically update Markdown files
const subcategories = Object.keys(data[category])
subcategories.forEach((subcategory) => {
for (const subcategory of subcategories) {
if (!restVersions[category]) {
restVersions[category] = {}
}
@@ -112,8 +112,8 @@ async function getDataFrontmatter(
} else if (!restVersions[category][subcategory].versions.includes(docsVersionName)) {
restVersions[category][subcategory].versions.push(docsVersionName)
}
})
})
}
}
}
return restVersions
}

View File

@@ -53,10 +53,11 @@ describe('rest example requests and responses', () => {
test('check example number and status code appear', async () => {
const mergedExamples = await getCodeSamples(operation)
// example is any because getCodeSamples returns objects from untyped JavaScript module
mergedExamples.forEach((example: any, index: number) => {
for (let index = 0; index < mergedExamples.length; index++) {
const example: any = mergedExamples[index]
expect(example.request.description).toBe(
`Example ${index + 1}: Status Code ${example.response.statusCode}`,
)
})
}
})
})

View File

@@ -74,25 +74,29 @@ describe('markdown for each rest version', () => {
if (isApiVersioned(version)) {
for (const apiVersion of allVersions[version].apiVersions) {
const apiOperations = await getRest(version, apiVersion)
Object.keys(apiOperations).forEach((category) => allCategories.add(category))
for (const category of Object.keys(apiOperations)) {
allCategories.add(category)
}
openApiSchema[version] = apiOperations
}
} else {
const apiOperations = await getRest(version)
Object.keys(apiOperations).forEach((category) => allCategories.add(category))
for (const category of Object.keys(apiOperations)) {
allCategories.add(category)
}
openApiSchema[version] = apiOperations
}
}
// Read the versions from each index.md file to build a list of
// applicable versions for each category
walk('content/rest', { includeBasePath: true, directories: false })
.filter((filename) => filename.includes('index.md'))
.forEach((file) => {
const applicableVersions = getApplicableVersionFromFile(file)
const { category } = getCategorySubcategory(file)
categoryApplicableVersions[category] = applicableVersions
})
for (const file of walk('content/rest', { includeBasePath: true, directories: false }).filter(
(filename) => filename.includes('index.md'),
)) {
const applicableVersions = getApplicableVersionFromFile(file)
const { category } = getCategorySubcategory(file)
categoryApplicableVersions[category] = applicableVersions
}
})
test('markdown file exists for every operationId prefix in all versions of the OpenAPI schema', async () => {
@@ -115,7 +119,7 @@ describe('markdown for each rest version', () => {
test('category and subcategory exist in OpenAPI schema for every applicable version in markdown frontmatter', async () => {
const automatedFiles = getAutomatedMarkdownFiles('content/rest')
automatedFiles.forEach((file) => {
for (const file of automatedFiles) {
const applicableVersions = getApplicableVersionFromFile(file)
const { category, subCategory } = getCategorySubcategory(file)
@@ -129,7 +133,7 @@ describe('markdown for each rest version', () => {
`The versions that apply to category ${category} do not contain the expected ${version}. Please check the versions for file ${file} or look at the index that governs that file (in its parent directory).`,
).toContain(version)
}
})
}
})
})
@@ -155,15 +159,14 @@ describe('OpenAPI schema validation', () => {
// even though the version is not yet supported in the docs)
test('every OpenAPI version must have a schema file in the docs', async () => {
const decoratedFilenames = walk(schemasPath).map((filename) => path.basename(filename, '.json'))
Object.values(allVersions)
.map((version) => version.openApiVersionName)
.forEach((openApiBaseName) => {
// Because the rest calendar dates now have latest, next, or calendar date attached to the name, we're
// now checking if the decorated file names now start with an openApiBaseName
expect(
decoratedFilenames.some((versionFile) => versionFile.startsWith(openApiBaseName)),
).toBe(true)
})
const openApiBaseNames = Object.values(allVersions).map((version) => version.openApiVersionName)
for (const openApiBaseName of openApiBaseNames) {
// Because the rest calendar dates now have latest, next, or calendar date attached to the name, we're
// now checking if the decorated file names now start with an openApiBaseName
expect(
decoratedFilenames.some((versionFile) => versionFile.startsWith(openApiBaseName)),
).toBe(true)
}
})
test('operations object structure organized by version, category, and subcategory', async () => {
@@ -214,10 +217,10 @@ describe('code examples are defined', () => {
expect(isPlainObject(operation)).toBe(true)
expect(operation.codeExamples).toBeDefined()
// Code examples have dynamic structure from OpenAPI schema
operation.codeExamples.forEach((example: any) => {
for (const example of operation.codeExamples as any[]) {
expect(isPlainObject(example.request)).toBe(true)
expect(isPlainObject(example.response)).toBe(true)
})
}
}
})
})

View File

@@ -29,7 +29,7 @@ describe('REST references docs', () => {
// These tests exists because of issue #1960
test('rest subcategory with fpt in URL', async () => {
for (const category of [
const categories = [
'migrations',
'actions',
'activity',
@@ -58,7 +58,8 @@ describe('REST references docs', () => {
'search',
'teams',
'users',
]) {
]
for (const category of categories) {
// Without language prefix
{
const res = await get(`/free-pro-team@latest/rest/reference/${category}`)

View File

@@ -93,9 +93,9 @@ export function useAISearchLocalStorageCache<T = any>(
index.sort((a, b) => a.timestamp - b.timestamp)
const excess = index.length - maxEntries
const entriesToRemove = index.slice(0, excess)
entriesToRemove.forEach((entry) => {
for (const entry of entriesToRemove) {
localStorage.removeItem(entry.key)
})
}
index = index.slice(excess)
}

View File

@@ -43,7 +43,7 @@ export function useMultiQueryParams() {
const [asPathWithoutHash] = router.asPath.split('#')
const [asPathRoot, asPathQuery = ''] = asPathWithoutHash.split('?')
const searchParams = new URLSearchParams(asPathQuery)
initialKeys.forEach((key) => {
for (const key of initialKeys) {
if (key === 'search-overlay-ask-ai') {
if (newParams[key] === 'true') {
searchParams.set(key, 'true')
@@ -57,7 +57,7 @@ export function useMultiQueryParams() {
searchParams.delete(key)
}
}
})
}
const paramsString = searchParams.toString() ? `?${searchParams.toString()}` : ''
let newUrl = `${asPathRoot}${paramsString}`
if (asPathRoot !== '/' && router.locale) {

Some files were not shown because too many files have changed in this diff