
Remove disabling of prefer-template eslint rule (#58230)

Kevin Heis
2025-10-30 09:24:12 -07:00
committed by GitHub
parent 4f46d28722
commit 7d1a209660
72 changed files with 234 additions and 250 deletions
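
The pattern applied across these 72 files is the one ESLint's prefer-template rule enforces: string concatenation that mixes literals with expressions is rewritten as a template literal. A minimal sketch of the before/after shape; the helper names here are illustrative, not taken from the changed files:

// Before: concatenation of a literal and an expression, which prefer-template flags
const hrefConcat = (segments: string[]): string => '/' + segments.join('/')

// After: the equivalent template literal, which is what the rule's auto-fix produces
const hrefTemplate = (segments: string[]): string => `/${segments.join('/')}`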

View File

@@ -93,14 +93,13 @@ export default [
'no-redeclare': 'off', // Allow function overloads in TypeScript
'i18n-text/no-en': 'off', // This rule causes eslint to not run at all
'filenames/match-regex': 'off', // This rule causes eslint to not run at all
camelcase: 'off', // Many gh apis use underscores, 600+ uses
// Disabled rules to review
'github/no-then': 'off', // 30+
'@typescript-eslint/ban-ts-comment': 'off', // 50+
'no-shadow': 'off', // 150+
'prefer-template': 'off', // 150+
'github/array-foreach': 'off', // 250+
camelcase: 'off', // 600+
'no-console': 'off', // 800+
'@typescript-eslint/no-explicit-any': 'off', // 1000+
},

View File

@@ -34,7 +34,7 @@ export function hasLanguagePrefix(path: string): boolean {
export function stripLanguagePrefix(path: string): string {
if (hasLanguagePrefix(path)) {
const pathSegments = path.split('/')
return '/' + pathSegments.slice(2).join('/')
return `/${pathSegments.slice(2).join('/')}`
}
return path
}

View File

@@ -353,14 +353,14 @@ function getProxyPath(reqPath: string, requestedVersion: string) {
// Releases 2.18 and higher
if (versionSatisfiesRange(requestedVersion, `>${lastVersionWithoutArchivedRedirectsFile}`)) {
const newReqPath = reqPath.includes('redirects.json') ? `/${reqPath}` : reqPath + '/index.html'
const newReqPath = reqPath.includes('redirects.json') ? `/${reqPath}` : `${reqPath}/index.html`
return ENTERPRISE_GH_PAGES_URL_PREFIX + requestedVersion + newReqPath
}
// Releases 2.13 - 2.17
// redirect.json files don't exist for these versions
if (versionSatisfiesRange(requestedVersion, `>=2.13`)) {
return ENTERPRISE_GH_PAGES_URL_PREFIX + requestedVersion + reqPath + '/index.html'
return `${ENTERPRISE_GH_PAGES_URL_PREFIX + requestedVersion + reqPath}/index.html`
}
// Releases 2.12 and lower

View File

@@ -167,7 +167,7 @@ function incrementArticleLookup(
const source =
req.get('X-Request-Source') ||
(req.get('Referer')
? 'external-' + (new URL(req.get('Referer') || '').hostname || 'unknown')
? `external-${new URL(req.get('Referer') || '').hostname || 'unknown'}`
: 'external')
const tags = [

View File

@@ -156,10 +156,10 @@ function updateReadme(readmePath: string, markdown: string): void {
// Replace API documentation section, or append to end
if (readme.includes(placeholderComment)) {
const pattern = new RegExp(placeholderComment + '[\\s\\S]*', 'g')
readme = readme.replace(pattern, placeholderComment + '\n' + markdown)
const pattern = new RegExp(`${placeholderComment}[\\s\\S]*`, 'g')
readme = readme.replace(pattern, `${placeholderComment}\n${markdown}`)
} else {
readme += '\n' + markdown
readme += `\n${markdown}`
}
writeFileSync(readmePath, readme)

View File

@@ -103,7 +103,7 @@ describe('dynamic assets', () => {
})
test.each(['key', 'key=value'])('any query string (%p) triggers a redirect', async (qs) => {
const res = await get('/assets/images/_fixtures/screenshot.webp?' + qs)
const res = await get(`/assets/images/_fixtures/screenshot.webp?${qs}`)
expect(res.statusCode).toBe(302)
expect(res.headers.location).toBe('/assets/images/_fixtures/screenshot.webp')
expect(res.headers['cache-control']).toContain('public')

View File

@@ -113,7 +113,7 @@ describe('static assets', () => {
// This picks the first one found. We just need it to be anything
// that actually resolves.
const filePath = getNextStaticAsset('css')
const asURL = '/' + filePath.replace('.next', '_next').split(path.sep).join('/')
const asURL = `/${filePath.replace('.next', '_next').split(path.sep).join('/')}`
const res = await get(asURL)
expect(res.statusCode).toBe(200)
checkCachingHeaders(res)

View File

@@ -41,7 +41,7 @@ async function main() {
const matchHeading = '## Options\n'
const primaryHeadingSourceContent = sourceContent.replace(
matchHeading,
matchHeading + '\n### Primary Options\n',
`${matchHeading}\n### Primary Options\n`,
)
const currentFileName = path.basename(file)
const { data, content } = await convertContentToDocs(

View File

@@ -56,7 +56,7 @@ async function testCircularLinkFix(): Promise<boolean> {
}
console.log('\n--- Generated content preview ---')
console.log(result1.content.substring(0, 800) + '...')
console.log(`${result1.content.substring(0, 800)}...`)
return !hasCircularLink && hasValidLink
} catch (error) {

View File

@@ -81,7 +81,7 @@ export const codeAnnotationCommentSpacing = {
// No space after comment character - this is an error
const lineNumber: number = token.lineNumber + index + 1
const leadingWhitespace: string = line.match(/^\s*/)![0]
const fixedLine: string = leadingWhitespace + commentChar + ' ' + restOfLine
const fixedLine: string = `${leadingWhitespace + commentChar} ${restOfLine}`
addError(
onError,

View File

@@ -26,7 +26,7 @@ export const imageAltTextEndPunctuation: Rule = {
const range = getRange(token.line, imageAltText)
addFixErrorDetail(onError, token.lineNumber, imageAltText + '.', imageAltText, range, {
addFixErrorDetail(onError, token.lineNumber, `${imageAltText}.`, imageAltText, range, {
lineNumber: token.lineNumber,
editColumn: isStringQuoted(imageAltText)
? token.line.indexOf(']')

View File

@@ -26,7 +26,7 @@ export const linkQuotation: Rule = {
} else if (inLinkWithPrecedingQuotes && child.type === 'text') {
content.push(escapeRegExp((child.content || '').trim()))
} else if (inLinkWithPrecedingQuotes && child.type === 'code_inline') {
content.push('`' + escapeRegExp((child.content || '').trim()) + '`')
content.push(`\`${escapeRegExp((child.content || '').trim())}\``)
} else if (child.type === 'link_close') {
const title = content.join(' ')
const regex = new RegExp(`"\\[${title}\\]\\(${linkUrl}\\)({%.*%})?(!|\\.|\\?|,)?"`)
@@ -44,7 +44,7 @@ export const linkQuotation: Rule = {
newLine = newLine.slice(0, -1)
}
if (newLine.endsWith('".')) {
newLine = newLine.slice(0, -2) + '.'
newLine = `${newLine.slice(0, -2)}.`
}
const lineNumber = child.lineNumber
addError(

View File

@@ -104,7 +104,7 @@ function setLiquidErrors(condTagItems: any[], onError: RuleErrorCallback, lines:
for (let i = 0; i < condTagItems.length; i++) {
const item = condTagItems[i]
const tagNameNoCond = item.name === 'endif' || item.name === 'else'
const itemErrorName = tagNameNoCond ? item.name : item.name + ' ' + item.cond
const itemErrorName = tagNameNoCond ? item.name : `${item.name} ${item.cond}`
if (item.action.type === 'delete') {
// There is no next stack item, the endif tag is alway the
@@ -438,7 +438,7 @@ function updateConditionals(condTagItems: any[]) {
const newVersions = Object.entries(item.versionsObj).map(([key, value]) => {
if (key === 'ghes') {
if (value === '*') return key
return key + ' ' + value
return `${key} ${value}`
} else return key
})
item.action.cond = newVersions.join(' or ')

View File

@@ -55,7 +55,7 @@ export const frontmatterLiquidSyntax = {
addError(
onError,
lineNumber,
'Liquid syntax error: ' + errorDescription,
`Liquid syntax error: ${errorDescription}`,
value,
range,
null, // No fix possible
@@ -92,7 +92,7 @@ export const liquidSyntax = {
addError(
onError,
lineNumber,
'Liquid syntax error: ' + errorDescription,
`Liquid syntax error: ${errorDescription}`,
line,
range,
null, // No fix possible

View File

@@ -46,14 +46,13 @@ export const liquidTagWhitespace: Rule = {
const openTag = tag.slice(0, token.contentRange[0] - token.begin)
const closeTag = tag.slice(-(token.end - token.contentRange[1]))
const isOpenTagOneSpace = openTag !== openTag.trim() + ' '
const isCloseTagOneSpace = closeTag !== ' ' + closeTag.trim()
const isOpenTagOneSpace = openTag !== `${openTag.trim()} `
const isCloseTagOneSpace = closeTag !== ` ${closeTag.trim()}`
const moreThanOneSpace = /\s{2,}/
const isArgOneSpace = moreThanOneSpace.test(tag)
const fixedContent =
openTag.trim() + ' ' + token.content.replace(moreThanOneSpace, ' ') + ' ' + closeTag.trim()
const fixedContent = `${openTag.trim()} ${token.content.replace(moreThanOneSpace, ' ')} ${closeTag.trim()}`
if (isOpenTagOneSpace || isCloseTagOneSpace || isArgOneSpace) {
addFixErrorDetail(

View File

@@ -37,7 +37,7 @@ export const octiconAriaLabels: Rule = {
const octiconNameMatch = token.args.match(/["']([^"']+)["']/)
const octiconName = octiconNameMatch ? octiconNameMatch[1] : 'icon'
const originalContent = token.content
const fixedContent = originalContent + ` aria-label="${octiconName}"`
const fixedContent = `${originalContent} aria-label="${octiconName}"`
addFixErrorDetail(
onError,

View File

@@ -104,11 +104,10 @@ async function main(options: Options) {
function getVariables(): Map<string, string> {
const variables = new Map<string, string>()
for (const filePath of walkFiles('data/variables', '.yml')) {
const dottedPathBase =
'variables.' + filePath.replace('data/variables/', '').replace('.yml', '').replace(/\//g, '.')
const dottedPathBase = `variables.${filePath.replace('data/variables/', '').replace('.yml', '').replace(/\//g, '.')}`
const data = yaml.load(fs.readFileSync(filePath, 'utf-8')) as Record<string, unknown>
for (const key of Object.keys(data)) {
const dottedPath = dottedPathBase + '.' + key
const dottedPath = `${dottedPathBase}.${key}`
variables.set(dottedPath, filePath)
}
}

View File

@@ -170,14 +170,14 @@ function indentWrappedString(str: string, startingIndent: number): string {
if ((currentLine + word).length > effectiveWidth) {
if (isFirstLine) {
indentedString += currentLine.trim() + '\n'
indentedString += `${currentLine.trim()}\n`
isFirstLine = false
} else {
indentedString += NEW_LINE_PADDING + currentLine.trim() + '\n'
indentedString += `${NEW_LINE_PADDING + currentLine.trim()}\n`
}
currentLine = word + ' '
currentLine = `${word} `
} else {
currentLine += word + ' '
currentLine += `${word} `
}
}
if (isFirstLine) {

View File

@@ -145,7 +145,7 @@ describe(outdatedReleasePhaseTerminology.names.join(' - '), () => {
const markdown = ['This feature is in beta.'].join('\n')
const result = await runRule(outdatedReleasePhaseTerminology, {
strings: {
markdown: frontmatter + '\n' + markdown,
markdown: `${frontmatter}\n${markdown}`,
},
})
const errors = result.markdown

View File

@@ -18,7 +18,7 @@ interface OcticonsMatch {
}
const OptionsSyntax = /([a-zA-Z-]+)="([\w\s-]+)"*/g
const Syntax = new RegExp('"(?<icon>[a-zA-Z-]+)"(?<options>(?:\\s' + OptionsSyntax.source + ')*)')
const Syntax = new RegExp(`"(?<icon>[a-zA-Z-]+)"(?<options>(?:\\s${OptionsSyntax.source})*)`)
const SyntaxHelp = 'Syntax Error in tag \'octicon\' - Valid syntax: octicon "<name>" <key="value">'
/**

View File

@@ -268,7 +268,7 @@ function makeHref(root, filePath) {
} else {
nameSplit.push(nameSplit.pop().replace(/\.md$/, ''))
}
return '/' + nameSplit.join('/')
return `/${nameSplit.join('/')}`
}
function moveFolder(oldPath, newPath, files, opts) {

View File

@@ -28,7 +28,7 @@ export function findUnused({ absolute }: { absolute: boolean }) {
(name === 'data' || name === 'indented_data_reference') &&
args.startsWith('reusables.')
) {
const reusableName = path.join('data', ...args.split(' ')[0].split('.')) + '.md'
const reusableName = `${path.join('data', ...args.split(' ')[0].split('.'))}.md`
// Special cases where we don't want them to count as reusables. It's an example in a how-to doc
if (
reusableName.includes('foo/bar.md') ||

View File

@@ -65,7 +65,7 @@ export function findTopUsed(numberOfMostUsedToFind: number, { absolute }: { abso
console.log(`\nTop ${numberOfMostUsedToFind} most used reusables:`)
let i = 0
for (const [reusable, count] of sortedCounts.slice(0, numberOfMostUsedToFind)) {
let printReusablePath = path.join('data', ...reusable.split('.')) + '.md'
let printReusablePath = `${path.join('data', ...reusable.split('.'))}.md`
if (absolute) {
printReusablePath = path.resolve(printReusablePath)
}

View File

@@ -31,7 +31,7 @@ async function main(nameTuple: [string, string]) {
const parentIndexMd = path.join(path.dirname(after), 'index.md')
const fileContent = fs.readFileSync(parentIndexMd, 'utf-8')
const { data } = readFrontmatter(fileContent)
const afterShortname = '/' + after.split('/').slice(-1)[0].replace(/\.md$/, '')
const afterShortname = `/${after.split('/').slice(-1)[0].replace(/\.md$/, '')}`
if (data) assert(data.children.includes(afterShortname), `Child ${afterShortname} not found`)
}
} else {
@@ -43,7 +43,7 @@ async function main(nameTuple: [string, string]) {
const parentIndexMd = path.join(path.dirname(after), 'index.md')
const fileContent = fs.readFileSync(parentIndexMd, 'utf-8')
const { data } = readFrontmatter(fileContent)
const afterShortname = '/' + after.split('/').slice(-1)
const afterShortname = `/${after.split('/').slice(-1)}`
if (data) assert(data.children.includes(afterShortname), `Child ${afterShortname} not found`)
}
}
@@ -57,5 +57,5 @@ function makeHref(root: string, filePath: string) {
const last = nameSplit.pop()
if (last) nameSplit.push(last.replace(/\.md$/, ''))
}
return '/' + nameSplit.join('/')
return `/${nameSplit.join('/')}`
}

View File

@@ -43,7 +43,7 @@ export default function alerts({ alertTitles = {} }: { alertTitles?: Record<stri
}
const alertType = alertTypes[getAlertKey(node).toUpperCase()]
node.tagName = 'div'
node.properties.className = 'ghd-alert ghd-alert-' + alertType.color
node.properties.className = `ghd-alert ghd-alert-${alertType.color}`
node.properties.dataContainer = 'alert'
node.children = [
h(

View File

@@ -886,8 +886,8 @@ test.describe('translations', () => {
// Playwright will cache this redirect, so we need to add something
// to "cache bust" the URL
const cb = `?cb=${Math.random()}`
await page.goto('/get-started/start-your-journey/hello-world' + cb)
await expect(page).toHaveURL('/ja/get-started/start-your-journey/hello-world' + cb)
await page.goto(`/get-started/start-your-journey/hello-world${cb}`)
await expect(page).toHaveURL(`/ja/get-started/start-your-journey/hello-world${cb}`)
// If you go, with the Japanese cookie, to the English page directly,
// it will offer a link to the Japanese URL in a banner.

View File

@@ -64,7 +64,7 @@ export const DefaultLayout = (props: Props) => {
const metaDescription = page.introPlainText ? page.introPlainText : t('default_description')
const SOCIAL_CATEGORIES = new Set(['code-security', 'actions', 'issues', 'copilot'])
const SOCIAL_CARD_IMG_BASE_URL = `${xHost ? 'https://' + xHost : ''}/assets/cb-345/images/social-cards`
const SOCIAL_CARD_IMG_BASE_URL = `${xHost ? `https://${xHost}` : ''}/assets/cb-345/images/social-cards`
function getCategoryImageUrl(category: string): string {
return `${SOCIAL_CARD_IMG_BASE_URL}/${category}.png`

View File

@@ -266,7 +266,7 @@ export const getMainContext = async (req: any, res: any): Promise<MainContextT>
enterpriseServerVersions: req.context.enterpriseServerVersions,
error: req.context.error ? req.context.error.toString() : '',
featureFlags: {},
fullUrl: req.protocol + '://' + req.hostname + req.originalUrl, // does not include port for localhost
fullUrl: `${req.protocol}://${req.hostname}${req.originalUrl}`, // does not include port for localhost
isHomepageVersion: req.context.page?.documentType === 'homepage',
nonEnterpriseDefaultVersion: req.context.nonEnterpriseDefaultVersion,
page: pageInfo,

View File

@@ -174,7 +174,7 @@ export async function getAutomatedPageMiniTocItems(
for (let i = 0; i < depth; i++) {
title += '#'
}
return title + ` ${item}\n`
return `${title} ${item}\n`
})
.join('')

View File

@@ -57,7 +57,7 @@ export function readCompressedJsonFileFallbackLazily(xpath: string): () => any {
// err is any because fs errors can have various shapes with code property
if (err.code === 'ENOENT') {
try {
fs.accessSync(xpath + '.br')
fs.accessSync(`${xpath}.br`)
} catch (err: any) {
// err is any because fs errors can have various shapes with code property
if (err.code === 'ENOENT') {

View File

@@ -110,7 +110,7 @@ async function rereadByPath(
// but perhaps one day we can always and only do these kinds of lookups
// at runtime.
const possible = path.join(contentRoot, withoutVersion)
const filePath = existsSync(possible) ? path.join(possible, 'index.md') : possible + '.md'
const filePath = existsSync(possible) ? path.join(possible, 'index.md') : `${possible}.md`
const relativePath = path.relative(contentRoot, filePath)
const basePath = contentRoot

View File

@@ -30,7 +30,7 @@ export default function handleNextDataPath(
if (parts[1] === 'free-pro-team@latest') {
parts.splice(1, 1)
}
req.pagePath = '/' + parts.join('/').replace(/.json+$/, '')
req.pagePath = `/${parts.join('/').replace(/.json+$/, '')}`
} else {
req.pagePath = req.path
}

View File

@@ -106,7 +106,7 @@ export default async function renderPage(req: ExtendedRequest, res: Response) {
req.context.currentVersion === 'free-pro-team@latest' ||
!allVersions[req.context.currentVersion!]
) {
page.fullTitle += ' - ' + context.site!.data.ui.header.github_docs
page.fullTitle += ` - ${context.site!.data.ui.header.github_docs}`
} else {
const { versionTitle } = allVersions[req.context.currentVersion!]
page.fullTitle += ' - '
@@ -116,7 +116,7 @@ export default async function renderPage(req: ExtendedRequest, res: Response) {
if (!versionTitle.includes('GitHub')) {
page.fullTitle += 'GitHub '
}
page.fullTitle += versionTitle + ' Docs'
page.fullTitle += `${versionTitle} Docs`
}
}

View File

@@ -78,16 +78,14 @@ describe('pages module', () => {
.map(([path]) => path)
// Build a detailed message with sources for each duplicate
const message =
`Found ${duplicates.length} duplicate redirect_from path${duplicates.length === 1 ? '' : 's'}.
const message = `Found ${duplicates.length} duplicate redirect_from path${duplicates.length === 1 ? '' : 's'}.
Ensure that you don't define the same path more than once in the redirect_from property in a single file and across all English files.
You may also receive this error if you have defined the same children property more than once.\n` +
duplicates
You may also receive this error if you have defined the same children property more than once.\n${duplicates
.map((dup) => {
const files = Array.from(redirectToFiles.get(dup) || [])
return `${dup}\n Defined in:\n ${files.join('\n ')}`
})
.join('\n\n')
.join('\n\n')}`
expect(duplicates.length, message).toBe(0)
})
@@ -136,10 +134,12 @@ describe('pages module', () => {
.flatten()
.value()
const failureMessage =
JSON.stringify(frontmatterErrors, null, 2) +
'\n\n' +
chain(frontmatterErrors).map('filepath').join('\n').value()
const failureMessage = `${JSON.stringify(frontmatterErrors, null, 2)}\n\n${chain(
frontmatterErrors,
)
.map('filepath')
.join('\n')
.value()}`
expect(frontmatterErrors.length, failureMessage).toBe(0)
})

View File

@@ -47,7 +47,7 @@ describe('toc links', () => {
}
}
const message = 'broken link in a TOC: ' + JSON.stringify(issues, null, 2)
const message = `broken link in a TOC: ${JSON.stringify(issues, null, 2)}`
expect(issues.length, message).toBe(0)
})
})

View File

@@ -359,10 +359,10 @@ async function isExistingIssue(
let query = encodeURIComponent(`is:issue repo:${repo} `)
if (searchQuery) {
query += '+' + searchQuery
query += `+${searchQuery}`
}
if (labelQuery) {
query += '+' + labelQuery
query += `+${labelQuery}`
}
const issues = await octokit.request(`GET /search/issues?q=${query}`)

View File

@@ -28,7 +28,7 @@ export function updateContentFiles() {
let featureData = undefined
if (data.versions.feature) {
const featureFilePath = 'data/features/' + data.versions.feature + '.yml'
const featureFilePath = `data/features/${data.versions.feature}.yml`
const featureContent = fs.readFileSync(featureFilePath, 'utf8')
featureData = yaml.load(featureContent) as featureDataType
if (!featureData || !featureData.versions)
@@ -117,8 +117,8 @@ function removeFileUpdateParent(filePath: string) {
if (!data) return
// Children paths are relative to the index.md file's directory
const childPath = filePath.endsWith('index.md')
? '/' + path.basename(path.dirname(filePath))
: '/' + path.basename(filePath, '.md')
? `/${path.basename(path.dirname(filePath))}`
: `/${path.basename(filePath, '.md')}`
// Remove the childPath from the parent index.md file's children frontmatter
data.children = data.children.filter((child) => child !== childPath)

View File

@@ -448,11 +448,11 @@ function getDisplayTitle(
const displayTitle = isRest
? !resourceGroup
? sentenceCase(title) + ' permissions'
: `"${sentenceCase(title)}" ` + resourceGroup + ' permissions'
? `${sentenceCase(title)} permissions`
: `"${sentenceCase(title)}" ${resourceGroup} permissions`
: !resourceGroup
? sentenceCase(title) + ' permissions'
: sentenceCase(resourceGroup) + ` permissions for "${title}"`
? `${sentenceCase(title)} permissions`
: `${sentenceCase(resourceGroup)} permissions for "${title}"`
return { title, displayTitle }
}

View File

@@ -76,7 +76,7 @@ describe('REST references docs', () => {
...value.map(
(item: EnabledItem) =>
`/en/rest/${key}${
categoriesWithoutSubcategories.includes(key) ? '' : '/' + item.subcategory
categoriesWithoutSubcategories.includes(key) ? '' : `/${item.subcategory}`
}#${item.slug}`,
),
)
@@ -110,7 +110,7 @@ describe('REST references docs', () => {
...value.permissions.map(
(item: PermissionItem) =>
`/en/rest/${item.category}${
categoriesWithoutSubcategories.includes(item.category) ? '' : '/' + item.subcategory
categoriesWithoutSubcategories.includes(item.category) ? '' : `/${item.subcategory}`
}#${item.slug}`,
),
)

View File

@@ -175,12 +175,9 @@ export async function createChangelogEntry(
}),
)
const cleanTitle = cleanPreviewTitle(previewTitle)
const entryTitle =
'The [' +
cleanTitle +
'](/graphql/overview/schema-previews#' +
previewAnchor(cleanTitle) +
') includes these changes:'
const entryTitle = `The [${cleanTitle}](/graphql/overview/schema-previews#${previewAnchor(
cleanTitle,
)}) includes these changes:`
changelogEntry.previewChanges.push({
title: entryTitle,
changes: renderedPreviewChanges,
@@ -220,7 +217,7 @@ export function cleanPreviewTitle(title: string): string {
} else if (title === 'MergeInfoPreview') {
title = 'Merge info preview'
} else if (!title.endsWith('preview')) {
title = title + ' preview'
title = `${title} preview`
}
return title
}

View File

@@ -59,7 +59,7 @@ const graphqlTypes: GraphQLTypeInfo[] = JSON.parse(
const singleQuotesInsteadOfBackticks = / '(\S+?)' /
function addPeriod(string: string): string {
return string.endsWith('.') ? string : string + '.'
return string.endsWith('.') ? string : `${string}.`
}
async function getArguments(

View File

@@ -146,7 +146,7 @@ export const getProductLandingContextFromRequest = async (
key,
label:
key === 'popular' || key === 'videos'
? req.context.page.featuredLinks[key + 'Heading'] || req.context.site.data.ui.toc[key]
? req.context.page.featuredLinks[`${key}Heading`] || req.context.site.data.ui.toc[key]
: req.context.site.data.ui.toc[key],
viewAllHref:
key === 'startHere' && !req.context.currentCategory && hasGuidesPage

View File

@@ -135,10 +135,10 @@ function RestNavListItem({ category }: { category: ProductTreeNode }) {
(entries) => {
entries.forEach((entry) => {
if (entry.target.id) {
const anchor = '#' + entry.target.id.split('--')[0]
const anchor = `#${entry.target.id.split('--')[0]}`
if (entry.isIntersecting === true) setVisibleAnchor(anchor)
} else if (asPath.includes('#')) {
setVisibleAnchor('#' + asPath.split('#')[1])
setVisibleAnchor(`#${asPath.split('#')[1]}`)
} else {
setVisibleAnchor('')
}

View File

@@ -84,7 +84,7 @@ export const LanguagePicker = ({ xs, mediumOrLower }: Props) => {
className={`color-fg-default width-full ${styles.menuButton}`}
aria-label={`Select language: current language is ${selectedLang.name}`}
>
<span className={styles.languageLabel}>{t('language_picker_label') + '\n'}</span>
<span className={styles.languageLabel}>{`${t('language_picker_label')}\n`}</span>
<span className="color-fg-muted text-normal f6">{selectedLang.name}</span>
</ActionMenu.Button>
</ActionMenu.Anchor>

View File

@@ -253,7 +253,7 @@ export function correctTranslatedContentStrings(
return match
}
const withLinebreak = match.slice(0, -1) + '\n'
const withLinebreak = `${match.slice(0, -1)}\n`
if (englishContent.includes(withLinebreak) && !englishContent.includes(match)) {
return withLinebreak
}
@@ -332,7 +332,7 @@ export function correctTranslatedContentStrings(
const keyString = '5DE3 E050 9C47 EA3C F04A 42D3 4AEE 18F8 3AFD EB23'
const translatedSentences = [
// ru
'Полный отпечаток ключа\u00A0\u2014 `' + keyString + '`.',
`Полный отпечаток ключа\u00A0\u2014 \`${keyString}\`.`,
// ko
`키의 전체 지문은 \`${keyString}\`입니다.`,
// es

View File

@@ -70,7 +70,7 @@ export function createTranslationFallbackComment(error: Error, property: string)
// Limit message length to keep comment manageable
if (cleanMessage.length > 200) {
cleanMessage = cleanMessage.substring(0, 200) + '...'
cleanMessage = `${cleanMessage.substring(0, 200)}...`
}
errorDetails.push(`msg="${cleanMessage.replace(/"/g, "'")}"`)
@@ -141,7 +141,7 @@ export async function renderContentWithFallback(
// Skip for textOnly rendering to avoid breaking plain text output
if (context.currentLanguage !== 'en' && !options?.textOnly) {
const errorComment = createTranslationFallbackComment(error as Error, property)
return errorComment + '\n' + fallbackContent
return `${errorComment}\n${fallbackContent}`
}
return fallbackContent
@@ -181,7 +181,7 @@ export async function executeWithFallback<T>(
// Only for HTML content (detected by presence of HTML tags)
if (typeof fallbackContent === 'string' && /<[^>]+>/.test(fallbackContent)) {
const errorComment = createTranslationFallbackComment(error as Error, 'content')
return (errorComment + '\n' + fallbackContent) as T
return `${errorComment}\n${fallbackContent}` as T
}
return fallbackContent

View File

@@ -19,8 +19,7 @@ export function createTranslationFunctions(uiData: UIStrings, namespaces: string
if (missingNamespaces.length > 0) {
console.warn(
`Missing namespaces [${missingNamespaces.join(', ')}] in UI data. ` +
'Available namespaces: ' +
Object.keys(uiData).sort().join(', '),
`Available namespaces: ${Object.keys(uiData).sort().join(', ')}`,
)
// For 404 pages, we can't afford to throw errors; create defensive fallbacks

View File

@@ -14,7 +14,7 @@ describeIfElasticsearchURL('search v1 middleware in non-English', () => {
// which clearly has a record with the title "Foo"
sp.set('query', 'foo')
sp.set('language', 'ja')
const res = await get('/api/search/v1?' + sp)
const res = await get(`/api/search/v1?${sp}`)
expect(res.statusCode).toBe(200)
const results = JSON.parse(res.body)

View File

@@ -611,7 +611,7 @@ function flawIssueDisplay(flaws: LinkFlaw[], opts: Options, mentionExternalExclu
// limit is 65536
if (output.length > 60000) {
output = output.slice(0, 60000) + '\n\n---\n\nOUTPUT TRUNCATED'
output = `${output.slice(0, 60000)}\n\n---\n\nOUTPUT TRUNCATED`
}
return output
@@ -950,7 +950,7 @@ async function checkHrefLink(
// 6. 'https://example.com' (external link)
const [pathFragment, hashFragment] = href.split('#')
const hash = '#' + hashFragment // the hash is the part that starts with `#`
const hash = `#${hashFragment}` // the hash is the part that starts with `#`
// this conditional handles cases in which the link is to the current article (cases 1-3 above)
if (checkAnchors && (!pathFragment || pathFragment === permalink.href)) {

View File

@@ -38,7 +38,7 @@ export function generateNewJSON(
const writeTo = options.output || destinationFilePath
// It's important that this serializes exactly like the Ruby code
// that is the CLI script `script/add-docs-url` in github/github.
const serialized = JSON.stringify(destination, null, 2) + '\n'
const serialized = `${JSON.stringify(destination, null, 2)}\n`
fs.writeFileSync(writeTo, serialized, 'utf-8')
console.log(`Wrote ${countChanges} change${countChanges === 1 ? '' : 's'} to ${writeTo}`)
if (writeTo !== destinationFilePath) {

View File

@@ -77,7 +77,7 @@ async function main(): Promise<void> {
console.log(csvEntry)
results.push(csvEntry)
}
csvString += results.join('\n') + '\n'
csvString += `${results.join('\n')}\n`
fs.writeFileSync(outputFile, csvString.trim(), 'utf8')
console.log(`Done! Wrote ${outputFile}`)

View File

@@ -40,13 +40,13 @@ function stringify(data: Record<string, any>): string {
stringValue = stringValue.replace(/["\\]/g, '\\$&')
}
if (needs_quoting || needs_escaping) {
stringValue = '"' + stringValue + '"'
stringValue = `"${stringValue}"`
}
if (stringValue === '' && !is_null) {
stringValue = '""'
}
line += key + '=' + stringValue + ' '
line += `${key}=${stringValue} `
}
// trim trailing space

View File

@@ -47,7 +47,7 @@ export function getAutomaticRequestLogger() {
toLogfmt({
...loggerContext,
status,
responseTime: responseTime + ' ms',
responseTime: `${responseTime} ms`,
contentLength: String(contentLength),
method,
url,
@@ -71,7 +71,7 @@ export function getAutomaticRequestLogger() {
chalk.reset(method),
chalk.reset(url),
chalk[color](status),
chalk.reset(responseTime + ' ms'),
chalk.reset(`${responseTime} ms`),
chalk.reset('-'),
chalk.reset(String(contentLength)),
].join(' ')

View File

@@ -73,11 +73,10 @@ export default function getRedirect(uri: string, context: Context): string | und
if (withoutLanguage.startsWith(nonEnterpriseDefaultVersionPrefix)) {
// E.g. '/free-pro-team@latest/foo/bar' or '/free-pro-team@latest'
basicCorrection =
`/${language}` + withoutLanguage.replace(nonEnterpriseDefaultVersionPrefix, '')
basicCorrection = `/${language}${withoutLanguage.replace(nonEnterpriseDefaultVersionPrefix, '')}`
} else if (withoutLanguage.replace('/', '') in allVersions && !languagePrefixRegex.test(uri)) {
// E.g. just '/github-ae@latest' or '/enterprise-cloud@latest'
basicCorrection = `/${language}` + withoutLanguage
basicCorrection = `/${language}${withoutLanguage}`
return basicCorrection
}
@@ -86,18 +85,20 @@ export default function getRedirect(uri: string, context: Context): string | und
withoutLanguage.startsWith('/enterprise-server/')
) {
// E.g. '/enterprise-server' or '/enterprise-server/3.0/foo'
basicCorrection =
`/${language}` +
withoutLanguage.replace('/enterprise-server', `/enterprise-server@${latestStable}`)
basicCorrection = `/${language}${withoutLanguage.replace(
'/enterprise-server',
`/enterprise-server@${latestStable}`,
)}`
// If it's now just the version, without anything after, exit here
if (withoutLanguage === '/enterprise-server') {
return basicCorrection
}
} else if (withoutLanguage.startsWith('/enterprise-server@latest')) {
// E.g. '/enterprise-server@latest' or '/enterprise-server@latest/3.3/foo'
basicCorrection =
`/${language}` +
withoutLanguage.replace('/enterprise-server@latest', `/enterprise-server@${latestStable}`)
basicCorrection = `/${language}${withoutLanguage.replace(
'/enterprise-server@latest',
`/enterprise-server@${latestStable}`,
)}`
// If it was *just* '/enterprise-server@latest' all that's needed is
// the language but with 'latest' replaced with the value of `latest`
if (withoutLanguage === '/enterprise-server@latest') {
@@ -115,14 +116,16 @@ export default function getRedirect(uri: string, context: Context): string | und
const version = withoutLanguage.split('/')[2]
if (withoutLanguage === `/enterprise/${version}`) {
// E.g. `/enterprise/3.0`
basicCorrection =
`/${language}` +
withoutLanguage.replace(`/enterprise/${version}`, `/enterprise-server@${version}`)
basicCorrection = `/${language}${withoutLanguage.replace(
`/enterprise/${version}`,
`/enterprise-server@${version}`,
)}`
return basicCorrection
} else {
basicCorrection =
`/${language}` +
withoutLanguage.replace(`/enterprise/${version}/`, `/enterprise-server@${version}/`)
basicCorrection = `/${language}${withoutLanguage.replace(
`/enterprise/${version}/`,
`/enterprise-server@${version}/`,
)}`
}
} else if (withoutLanguage === '/enterprise') {
// E.g. `/enterprise` exactly
@@ -136,11 +139,9 @@ export default function getRedirect(uri: string, context: Context): string | und
// If the URL is without a language, and no redirect is necessary,
// but it has as version prefix, the language has to be there
// otherwise it will never be found in `req.context.pages`
basicCorrection =
`/${language}` +
withoutLanguage
.replace(`/enterprise/`, `/enterprise-server@${latest}/`)
.replace('/user/', '/')
basicCorrection = `/${language}${withoutLanguage
.replace(`/enterprise/`, `/enterprise-server@${latest}/`)
.replace('/user/', '/')}`
} else if (withoutLanguage.startsWith('/insights')) {
// E.g. '/insights/foo'
basicCorrection = uri.replace('/insights', `${language}/enterprise-server@${latest}/insights`)
@@ -171,7 +172,7 @@ export default function getRedirect(uri: string, context: Context): string | und
if (supported.includes(version) || version === 'latest') {
prefix = `/${majorVersion}@${version}`
suffix = '/' + split.slice(2).join('/')
suffix = `/${split.slice(2).join('/')}`
if (
suffix.includes('/user') ||
@@ -183,7 +184,7 @@ export default function getRedirect(uri: string, context: Context): string | und
} else {
// If version is not supported, we still need to set these values
prefix = `/${majorVersion}@${version}`
suffix = '/' + split.slice(2).join('/')
suffix = `/${split.slice(2).join('/')}`
}
const newURL = prefix + suffix
@@ -319,7 +320,7 @@ function tryReplacements(prefix: string, suffix: string, context: Context): stri
return false
}
const candidateAsRedirect = prefix + suffix
const candidateAsURL = '/en' + candidateAsRedirect
const candidateAsURL = `/en${candidateAsRedirect}`
return candidateAsRedirect in redirects || candidateAsURL in pages
}

View File

@@ -76,7 +76,7 @@ export default function handleRedirects(req: ExtendedRequest, res: Response, nex
// have to do this now because searchPath replacement changes the path as well as the query params
if (queryParams) {
queryParams = '?' + queryParams
queryParams = `?${queryParams}`
}
// remove query params temporarily so we can find the path in the redirects object

View File

@@ -106,8 +106,8 @@ function FineGrainedAccess({ progAccess }: FineGrainedProps) {
numPermissionSets === 0
? t('no_permission_sets')
: numPermissionSets > 1
? t('permission_sets') + ':'
: t('permission_set') + ':'
? `${t('permission_sets')}:`
: `${t('permission_set')}:`
const publicAccessMsg =
numPermissionSets === 0
? t('allows_public_read_access_no_permissions')

View File

@@ -33,7 +33,7 @@ export function RestRedirect() {
const params = new URLSearchParams(asPathQuery)
params.set('apiVersion', date)
const url = `/${router.locale}${asPathRoot}?${params}${hash ? '#' + hash : ''}`
const url = `/${router.locale}${asPathRoot}?${params}${hash ? `#${hash}` : ''}`
router.replace(url)
}
}, [router.asPath, currentVersion])

View File

@@ -29,48 +29,46 @@ log(chalk.white.bold(' npm run dev\n'))
log(chalk.green.bold.underline('REST docs script examples\n'))
log(chalk.green.bold(' Examples of ways you can build the REST docs locally:\n'))
log(
chalk.cyan.bold(' - REST All versions:') +
' ' +
chalk.magenta('npm run sync-rest && npm run dev'),
`${chalk.cyan.bold(' - REST All versions:')} ${chalk.magenta(
'npm run sync-rest && npm run dev',
)}`,
)
log(
chalk.cyan.bold(' - REST Dotcom only:') +
' ' +
chalk.magenta('npm run sync-rest -- --versions api.github.com && npm run dev'),
`${chalk.cyan.bold(' - REST Dotcom only:')} ${chalk.magenta(
'npm run sync-rest -- --versions api.github.com && npm run dev',
)}`,
)
log(
chalk.cyan.bold(' - REST Two versions:') +
' ' +
chalk.magenta('npm run sync-rest -- --versions ghes-3.7 ghes-3.8 && npm run dev'),
`${chalk.cyan.bold(' - REST Two versions:')} ${chalk.magenta(
'npm run sync-rest -- --versions ghes-3.7 ghes-3.8 && npm run dev',
)}`,
)
log(
chalk.cyan.bold(' - REST Dotcom and next calendar date version:') +
' ' +
chalk.magenta('npm run sync-rest -- --next --versions api.github.com && npm run dev'),
`${chalk.cyan.bold(' - REST Dotcom and next calendar date version:')} ${chalk.magenta(
'npm run sync-rest -- --next --versions api.github.com && npm run dev',
)}`,
)
log(
chalk.cyan.bold(' - REST Dotcom only, including unpublished operations:') +
' ' +
chalk.magenta(
'npm run sync-rest -- --versions api.github.com --include-unpublished && npm run dev',
),
`${chalk.cyan.bold(' - REST Dotcom only, including unpublished operations:')} ${chalk.magenta(
'npm run sync-rest -- --versions api.github.com --include-unpublished && npm run dev',
)}`,
)
log(chalk.green.bold.underline('\nWebhook docs script examples\n'))
log(chalk.green.bold(' Examples of ways you can build the Webhook docs locally:\n'))
log(
chalk.cyan.bold(' - Webhooks All versions:') +
' ' +
chalk.magenta('npm run sync-webhooks && npm run dev'),
`${chalk.cyan.bold(' - Webhooks All versions:')} ${chalk.magenta(
'npm run sync-webhooks && npm run dev',
)}`,
)
log(
chalk.cyan.bold(' - Webhooks Dotcom only:') +
' ' +
chalk.magenta('npm run sync-webhooks -- --versions api.github.com && npm run dev'),
`${chalk.cyan.bold(' - Webhooks Dotcom only:')} ${chalk.magenta(
'npm run sync-webhooks -- --versions api.github.com && npm run dev',
)}`,
)
log(
chalk.cyan.bold(' - Webhooks Two versions:') +
' ' +
chalk.magenta('npm run sync-webhooks -- --versions ghes-3.7 ghes-3.8 && npm run dev'),
`${chalk.cyan.bold(' - Webhooks Two versions:')} ${chalk.magenta(
'npm run sync-webhooks -- --versions ghes-3.7 ghes-3.8 && npm run dev',
)}`,
)
log(chalk.green.bold('\nFor more info and additional options, run:\n'))
log(chalk.white.bold(' npm run sync-rest -- --help'))

View File

@@ -72,11 +72,7 @@ export default async function getCodeSamples(operation: Operation): Promise<Merg
...example.request,
description:
count[example.request.description] > 1
? example.request.description +
' ' +
(i + 1) +
': Status Code ' +
example.response!.statusCode
? `${example.request.description} ${i + 1}: Status Code ${example.response!.statusCode}`
: example.request.description,
},
}))

View File

@@ -20,7 +20,7 @@ describe('anchor-redirect api', () => {
const sp = new URLSearchParams()
sp.set('path', path)
sp.set('hash', hash)
const res = await get('/api/anchor-redirect?' + sp)
const res = await get(`/api/anchor-redirect?${sp}`)
expect(res.statusCode).toBe(200)
const { to } = JSON.parse(res.body)
expect(to).toBe(value)
@@ -31,7 +31,7 @@ describe('anchor-redirect api', () => {
const hash = key.split('#')[1]
const sp = new URLSearchParams()
sp.set('hash', hash)
const res = await get('/api/anchor-redirect?' + sp)
const res = await get(`/api/anchor-redirect?${sp}`)
expect(res.statusCode).toBe(400)
})
test('errors when path is not passed', async () => {
@@ -40,14 +40,14 @@ describe('anchor-redirect api', () => {
const path = key.split('#')[0]
const sp = new URLSearchParams()
sp.set('path', path)
const res = await get('/api/anchor-redirect?' + sp)
const res = await get(`/api/anchor-redirect?${sp}`)
expect(res.statusCode).toBe(400)
})
test('unfound redirect returns undefined', async () => {
const sp = new URLSearchParams()
sp.set('path', 'foo')
sp.set('hash', 'bar')
const res = await get('/api/anchor-redirect?' + sp)
const res = await get(`/api/anchor-redirect?${sp}`)
const { to } = JSON.parse(res.body)
expect(to).toBe(undefined)
})
@@ -55,7 +55,7 @@ describe('anchor-redirect api', () => {
const sp = new URLSearchParams()
sp.set('path', 'foo')
sp.set('hash', 'bar')
const res = await get('/api/anchor-redirect?' + sp)
const res = await get(`/api/anchor-redirect?${sp}`)
expect(res.headers['cache-control']).toContain('public')
expect(res.headers['cache-control']).toMatch(/max-age=[1-9]/)
expect(res.headers['surrogate-control']).toContain('public')

View File

@@ -55,7 +55,7 @@ describe('rest example requests and responses', () => {
// example is any because getCodeSamples returns objects from untyped JavaScript module
mergedExamples.forEach((example: any, index: number) => {
expect(example.request.description).toBe(
'Example ' + (index + 1) + ': Status Code ' + example.response.statusCode,
`Example ${index + 1}: Status Code ${example.response.statusCode}`,
)
})
})

View File

@@ -109,7 +109,7 @@ describe('REST references docs', () => {
.text()
.trim()
if (apiVersion === allVersions[version].latestApiVersion) {
expect(versionName).toBe(apiVersion + ' (latest)')
expect(versionName).toBe(`${apiVersion} (latest)`)
} else {
expect(versionName).toBe(apiVersion)
}
@@ -148,12 +148,11 @@ describe('REST references docs', () => {
function formatErrors(differences: Record<string, any>): string {
let errorMessage = 'There are differences in Categories/Subcategories in:\n'
for (const schema in differences) {
errorMessage += 'Version: ' + schema + '\n'
errorMessage += `Version: ${schema}\n`
for (const category in differences[schema]) {
errorMessage += 'Category: ' + category + '\nSubcategories: \n'
errorMessage +=
' - content/rest directory: ' + differences[schema][category].contentDir + '\n'
errorMessage += ' - OpenAPI Schema: ' + differences[schema][category].openAPI + '\n'
errorMessage += `Category: ${category}\nSubcategories: \n`
errorMessage += ` - content/rest directory: ${differences[schema][category].contentDir}\n`
errorMessage += ` - OpenAPI Schema: ${differences[schema][category].openAPI}\n`
errorMessage += '---\n'
}
}

View File

@@ -299,7 +299,7 @@ export function SearchOverlay({
// When loading, capture the last height of the suggestions list so we can use it for the loading div
const previousSuggestionsListHeight = useMemo(() => {
if (generalSearchResults.length || aiAutocompleteOptions.length) {
return 7 * (generalSearchResults.length + aiAutocompleteOptions.length) + ''
return `${7 * (generalSearchResults.length + aiAutocompleteOptions.length)}`
} else {
return '150' // Default height for just 2 suggestions
}

View File

@@ -132,8 +132,7 @@ export const aiSearchProxy = async (req: ExtendedRequest, res: Response) => {
res.status(500).json({ errors: [{ message: 'Internal server error' }] })
} else {
// Send error message via the stream
const errorMessage =
JSON.stringify({ errors: [{ message: 'Internal server error' }] }) + '\n'
const errorMessage = `${JSON.stringify({ errors: [{ message: 'Internal server error' }] })}\n`
res.write(errorMessage)
res.end()
}

View File

@@ -4,7 +4,7 @@ export function safeUrlDisplay(url: string): string {
parsed.password = '***'
}
if (parsed.username) {
parsed.username = parsed.username.slice(0, 4) + '***'
parsed.username = `${parsed.username.slice(0, 4)}***`
}
return parsed.toString()
}

View File

@@ -29,7 +29,7 @@ export function utcTimestamp() {
d.getUTCSeconds(),
]
// If it's a number make it a zero-padding 2 character string
.map((x) => (typeof x === 'number' ? ('0' + x).slice(-2) : x))
.map((x) => (typeof x === 'number' ? `0${x}`.slice(-2) : x))
.join('')
)
}

View File

@@ -178,36 +178,35 @@ export default async function buildRecords(
// Report failed pages if any
if (failedPages.length > 0) {
console.log(
'\n' +
boxen(
chalk.bold.red(`${failedPages.length} page(s) failed to scrape\n\n`) +
failedPages
.slice(0, 10) // Show first 10 failures
.map((failure, idx) => {
return (
chalk.gray(`${idx + 1}. `) +
chalk.yellow(failure.errorType) +
'\n' +
(failure.relativePath
? chalk.cyan(` Path: `) + failure.relativePath + '\n'
: '') +
(failure.url ? chalk.cyan(` URL: `) + failure.url + '\n' : '') +
chalk.gray(` Error: ${failure.error}`)
)
})
.join('\n\n') +
(failedPages.length > 10
? `\n\n${chalk.gray(`... and ${failedPages.length - 10} more`)}`
: ''),
{
title: chalk.red('⚠ Failed Pages'),
padding: 1,
borderColor: 'yellow',
},
) +
'\n',
)
const failureCount = failedPages.length
const header = chalk.bold.red(`${failureCount} page(s) failed to scrape\n\n`)
const failureList = failedPages
.slice(0, 10) // Show first 10 failures
.map((failure, idx) => {
const number = chalk.gray(`${idx + 1}. `)
const errorType = chalk.yellow(failure.errorType)
const pathLine = failure.relativePath
? `\n${chalk.cyan(' Path: ')}${failure.relativePath}`
: ''
const urlLine = failure.url ? `\n${chalk.cyan(' URL: ')}${failure.url}` : ''
const errorLine = `\n${chalk.gray(` Error: ${failure.error}`)}`
return `${number}${errorType}${pathLine}${urlLine}${errorLine}`
})
.join('\n\n')
const remaining =
failureCount > 10 ? `\n\n${chalk.gray(`... and ${failureCount - 10} more`)}` : ''
const boxContent = header + failureList + remaining
const box = boxen(boxContent, {
title: chalk.red('⚠ Failed Pages'),
padding: 1,
borderColor: 'yellow',
})
console.log(`\n${box}\n`)
// Log suggestion
console.log(

View File

@@ -33,7 +33,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
// see src/search/tests/fixtures/search-indexes/github-docs-dotcom-en-records.json
// which clearly has a record with the title "Foo"
sp.set('query', 'foo')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse = JSON.parse(res.body)
@@ -75,7 +75,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
const sp = new URLSearchParams()
sp.set('query', 'foo')
sp.set('debug', '1') // Note!
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse = JSON.parse(res.body)
// safe because we know exactly the fixtures
@@ -90,7 +90,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
{
const sp = new URLSearchParams()
sp.set('query', 'sill')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse = JSON.parse(res.body)
// Fixtures contains no word called 'sill'. It does contain the term
@@ -105,7 +105,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
const sp = new URLSearchParams()
sp.set('query', 'sill')
sp.set('autocomplete', 'true')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse = JSON.parse(res.body)
expect(results.meta.found.value).toBeGreaterThanOrEqual(1)
@@ -119,7 +119,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
test('find nothing', async () => {
const sp = new URLSearchParams()
sp.set('query', 'xojixjoiwejhfoiuwehjfioweufhj')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse = JSON.parse(res.body)
expect(results.hits.length).toBe(0)
@@ -130,7 +130,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
const sp = new URLSearchParams()
sp.set('query', 'introduction heading')
sp.append('highlights', 'content')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse = JSON.parse(res.body)
expect(results.meta.found.value).toBeGreaterThanOrEqual(1)
@@ -145,7 +145,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
// This will match because it's in the 'content' but not in 'headings'
sp.set('query', 'Fact of life')
sp.set('highlights', 'title')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse = JSON.parse(res.body)
expect(results.meta.found.value).toBeGreaterThanOrEqual(1)
@@ -159,12 +159,12 @@ describeIfElasticsearchURL('search v1 middleware', () => {
const sp = new URLSearchParams()
sp.set('query', 'foo')
sp.set('version', 'dotcom')
const res1 = await get('/api/search/v1?' + sp.toString())
const res1 = await get(`/api/search/v1?${sp.toString()}`)
expect(res1.statusCode).toBe(200)
const results1: GeneralSearchResponse = JSON.parse(res1.body)
sp.set('version', 'free-pro-team@latest')
const res2 = await get('/api/search/v1?' + sp.toString())
const res2 = await get(`/api/search/v1?${sp.toString()}`)
expect(res2.statusCode).toBe(200)
const results2: GeneralSearchResponse = JSON.parse(res2.body)
expect(results1.hits[0].id).toBe(results2.hits[0].id)
@@ -185,7 +185,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
{
const sp = new URLSearchParams()
sp.set('query', ' ')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(400)
const errorResponse = JSON.parse(res.body) as {
error: string
@@ -198,7 +198,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
const sp = new URLSearchParams()
sp.set('query', 'test')
sp.set('language', 'xxx')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(400)
const errorResponse = JSON.parse(res.body) as {
error: string
@@ -211,7 +211,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
const sp = new URLSearchParams()
sp.set('query', 'test')
sp.set('page', '9999')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(400)
const errorResponse = JSON.parse(res.body) as {
error: string
@@ -224,7 +224,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
const sp = new URLSearchParams()
sp.set('query', 'test')
sp.set('version', 'xxxxx')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(400)
const errorResponse = JSON.parse(res.body) as {
error: string
@@ -238,7 +238,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
const sp = new URLSearchParams()
sp.set('query', 'test')
sp.set('size', 'not a number')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(400)
const errorResponse = JSON.parse(res.body) as {
error: string
@@ -251,7 +251,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
const sp = new URLSearchParams()
sp.set('query', 'test')
sp.set('sort', 'neverheardof')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(400)
const errorResponse = JSON.parse(res.body) as {
error: string
@@ -264,7 +264,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
const sp = new URLSearchParams()
sp.set('query', 'test')
sp.set('highlights', 'neverheardof')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(400)
const errorResponse = JSON.parse(res.body) as {
error: string
@@ -277,7 +277,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
const sp = new URLSearchParams()
sp.append('query', 'test1')
sp.append('query', 'test2')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(400)
const errorResponse = JSON.parse(res.body) as {
error: string
@@ -290,7 +290,7 @@ describeIfElasticsearchURL('search v1 middleware', () => {
test('breadcrumbless records should always return a string', async () => {
const sp = new URLSearchParams()
sp.set('query', 'breadcrumbs')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse = JSON.parse(res.body)
// safe because we know exactly the fixtures
@@ -305,7 +305,7 @@ describeIfElasticsearchURL("additional fields with 'include'", () => {
test("'intro' and 'headings' are omitted by default", async () => {
const sp = new URLSearchParams()
sp.set('query', 'foo')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse = JSON.parse(res.body)
const firstKeys = Object.keys(results.hits[0])
@@ -317,7 +317,7 @@ describeIfElasticsearchURL("additional fields with 'include'", () => {
const sp = new URLSearchParams()
sp.set('query', 'foo')
sp.set('include', 'intro')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse = JSON.parse(res.body)
const firstKeys = Object.keys(results.hits[0])
@@ -330,7 +330,7 @@ describeIfElasticsearchURL("additional fields with 'include'", () => {
sp.set('query', 'foo')
sp.append('include', 'intro')
sp.append('include', 'headings')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse = JSON.parse(res.body)
const firstKeys = Object.keys(results.hits[0])
@@ -342,7 +342,7 @@ describeIfElasticsearchURL("additional fields with 'include'", () => {
const sp = new URLSearchParams()
sp.set('query', 'foo')
sp.set('include', 'xxxxx')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(400)
const results = JSON.parse(res.body) as {
error: string
@@ -359,7 +359,7 @@ describeIfElasticsearchURL('filter by toplevel', () => {
const sp = new URLSearchParams()
sp.set('query', 'foo')
sp.set('include', 'toplevel')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse = JSON.parse(res.body)
// In the fixtures, there are two distinct `toplevel` that
@@ -373,7 +373,7 @@ describeIfElasticsearchURL('filter by toplevel', () => {
sp.set('query', 'foo')
sp.set('include', 'toplevel')
sp.set('toplevel', 'Baring')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse = JSON.parse(res.body)
const toplevels = new Set(results.hits.map((hit) => hit.toplevel))
@@ -386,7 +386,7 @@ describeIfElasticsearchURL('filter by toplevel', () => {
sp.set('include', 'toplevel')
sp.append('toplevel', 'Baring')
sp.append('toplevel', 'Fooing')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse = JSON.parse(res.body)
const toplevels = new Set(results.hits.map((hit) => hit.toplevel))
@@ -398,7 +398,7 @@ describeIfElasticsearchURL('filter by toplevel', () => {
sp.set('query', 'foo')
sp.set('include', 'toplevel')
sp.set('toplevel', 'Never heard of')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse = JSON.parse(res.body)
expect(results.meta.found.value).toBe(0)
@@ -412,7 +412,7 @@ describeIfElasticsearchURL('aggregate', () => {
const sp = new URLSearchParams()
sp.set('query', 'foo')
sp.set('aggregate', 'toplevel')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(200)
const results: GeneralSearchResponse & { aggregations?: SearchResultAggregations } = JSON.parse(
res.body,
@@ -428,7 +428,7 @@ describeIfElasticsearchURL('aggregate', () => {
const sp = new URLSearchParams()
sp.set('query', 'foo')
sp.set('aggregate', 'unrecognizedxxx')
const res = await get('/api/search/v1?' + sp.toString())
const res = await get(`/api/search/v1?${sp.toString()}`)
expect(res.statusCode).toBe(400)
const results = JSON.parse(res.body) as {
error: string

View File

@@ -14,7 +14,7 @@ describe('webhooks v1 middleware', () => {
// field which all webhook types should have.
sp.set('category', 'branch_protection_rule')
sp.set('version', 'free-pro-team@latest')
const res = await get('/api/webhooks/v1?' + sp)
const res = await get(`/api/webhooks/v1?${sp}`)
expect(res.statusCode).toBe(200)
const results = JSON.parse(res.body)
const actionTypes = Object.keys(results)
@@ -36,7 +36,7 @@ describe('webhooks v1 middleware', () => {
const sp = new URLSearchParams()
sp.set('category', 'branch_protection_rule')
sp.set('version', 'enterprise-cloud@latest')
const res = await get('/api/webhooks/v1?' + sp)
const res = await get(`/api/webhooks/v1?${sp}`)
expect(res.statusCode).toBe(200)
const results = JSON.parse(res.body)
const actionTypes = Object.keys(results)
@@ -50,7 +50,7 @@ describe('webhooks v1 middleware', () => {
const sp = new URLSearchParams()
sp.set('category', 'no-such-category')
sp.set('version', 'free-pro-team@latest')
const res = await get('/api/webhooks/v1?' + sp)
const res = await get(`/api/webhooks/v1?${sp}`)
expect(res.statusCode).toBe(404)
expect(JSON.parse(res.body).error).toBeTruthy()
@@ -60,7 +60,7 @@ describe('webhooks v1 middleware', () => {
const sp = new URLSearchParams()
sp.set('category', 'branch_protection_rule')
sp.set('version', 'no-such-version')
const res = await get('/api/webhooks/v1?' + sp)
const res = await get(`/api/webhooks/v1?${sp}`)
expect(res.statusCode).toBe(404)
expect(JSON.parse(res.body).error).toBeTruthy()

View File

@@ -167,13 +167,13 @@ async function main(owner: string, repo: string, baseSHA: string, headSHA: strin
`| ${headings.map((heading) => `**${heading}**`).join(' | ')} |`,
`| ${headings.map(() => ':---').join(' | ')} |`,
]
let markdownTable = markdownTableHead.join('\n') + '\n'
let markdownTable = `${markdownTableHead.join('\n')}\n`
for (const filteredLine of filteredLines) {
if ((markdownTable + filteredLine).length > MAX_COMMENT_SIZE) {
markdownTable += '\n**Note** There are more changes in this PR than we can show.'
break
}
markdownTable += filteredLine + '\n'
markdownTable += `${filteredLine}\n`
}
return markdownTable

View File

@@ -37,12 +37,13 @@ async function main() {
}
const graph: Record<string, any> = await github.graphql(mutation, variables)
console.log('GraphQL mutation result:\n' + JSON.stringify(graph))
console.log(`GraphQL mutation result:\n${JSON.stringify(graph)}`)
if (graph.errors && graph.errors.length > 0) {
console.error(
'ERROR! Failed to enable auto-merge:\n - ' +
graph.errors.map((error: any) => error.message).join('\n - '),
`ERROR! Failed to enable auto-merge:\n - ${graph.errors
.map((error: any) => error.message)
.join('\n - ')}`,
)
} else {
console.log('Auto-merge enabled!')

View File

@@ -118,7 +118,7 @@ Note: Requires a local server running on localhost:4000 (npm start)
const report = generateReport(results)
// Always output to console for local development
console.log('\n' + report)
console.log(`\n${report}`)
// If running in CI, also save report for commenting on PR
if (process.env.GITHUB_ACTIONS) {
@@ -198,12 +198,10 @@ async function waitForServer(): Promise<void> {
async function analyzeFile(filePath: string): Promise<PageReadability | null> {
// Convert file path to URL path
// content/get-started/foo.md -> /get-started/foo
const urlPath =
'/' +
filePath
.replace(/^content\//, '')
.replace(/\.md$/, '')
.replace(/\/index$/, '')
const urlPath = `/${filePath
.replace(/^content\//, '')
.replace(/\.md$/, '')
.replace(/\/index$/, '')}`
try {
// Fetch the rendered page