Fix all no-shadow ESLint violations (#58234)
This commit is contained in:
@@ -97,7 +97,6 @@ export default [
|
||||
|
||||
// Disabled rules to review
|
||||
'@typescript-eslint/ban-ts-comment': 'off', // 50+
|
||||
'no-shadow': 'off', // 150+
|
||||
'github/array-foreach': 'off', // 250+
|
||||
'no-console': 'off', // 800+
|
||||
'@typescript-eslint/no-explicit-any': 'off', // 1000+
|
||||
|
||||
@@ -49,11 +49,11 @@ const config: NextConfig = {
|
||||
}
|
||||
})
|
||||
},
|
||||
webpack: (config) => {
|
||||
config.experiments = config.experiments || {}
|
||||
config.experiments.topLevelAwait = true
|
||||
config.resolve.fallback = { fs: false, async_hooks: false }
|
||||
return config
|
||||
webpack: (webpackConfig) => {
|
||||
webpackConfig.experiments = webpackConfig.experiments || {}
|
||||
webpackConfig.experiments.topLevelAwait = true
|
||||
webpackConfig.resolve.fallback = { fs: false, async_hooks: false }
|
||||
return webpackConfig
|
||||
},
|
||||
|
||||
// https://nextjs.org/docs/api-reference/next.config.js/compression
|
||||
|
||||
@@ -143,9 +143,9 @@ export default async function dynamicAssets(
|
||||
const buffer = await image.webp({ effort }).toBuffer()
|
||||
assetCacheControl(res)
|
||||
return res.type('image/webp').send(buffer)
|
||||
} catch (error) {
|
||||
if (error instanceof Error && (error as any).code !== 'ENOENT') {
|
||||
throw error
|
||||
} catch (catchError) {
|
||||
if (catchError instanceof Error && (catchError as any).code !== 'ENOENT') {
|
||||
throw catchError
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,13 +16,13 @@ function getNextStaticAsset(directory: string) {
|
||||
return path.join(root, files[0])
|
||||
}
|
||||
|
||||
function mockRequest(path: string, { headers }: { headers?: Record<string, string> } = {}) {
|
||||
function mockRequest(requestPath: string, { headers }: { headers?: Record<string, string> } = {}) {
|
||||
const _headers = Object.fromEntries(
|
||||
Object.entries(headers || {}).map(([key, value]) => [key.toLowerCase(), value]),
|
||||
)
|
||||
return {
|
||||
path,
|
||||
url: path,
|
||||
path: requestPath,
|
||||
url: requestPath,
|
||||
get: (header: string) => {
|
||||
return _headers[header.toLowerCase()]
|
||||
},
|
||||
@@ -74,8 +74,8 @@ const mockResponse = () => {
|
||||
if (typeof key === 'string') {
|
||||
res.headers[key.toLowerCase()] = value
|
||||
} else {
|
||||
for (const [k, value] of Object.entries(key)) {
|
||||
res.headers[k.toLowerCase()] = value
|
||||
for (const [k, v] of Object.entries(key)) {
|
||||
res.headers[k.toLowerCase()] = v
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -319,9 +319,9 @@ describe('archived enterprise static assets', () => {
|
||||
},
|
||||
])(
|
||||
'should return $expectStatus for $name',
|
||||
({ name, path, referrer, expectStatus, shouldCallNext }) => {
|
||||
({ name, path: testPath, referrer, expectStatus, shouldCallNext }) => {
|
||||
test(name, async () => {
|
||||
const req = mockRequest(path, {
|
||||
const req = mockRequest(testPath, {
|
||||
headers: {
|
||||
Referrer: referrer,
|
||||
},
|
||||
@@ -359,22 +359,25 @@ describe('archived enterprise static assets', () => {
|
||||
expectStatus: undefined,
|
||||
shouldCallNext: true,
|
||||
},
|
||||
])('should not suppress $name', ({ name, path, referrer, expectStatus, shouldCallNext }) => {
|
||||
test(name, async () => {
|
||||
const req = mockRequest(path, {
|
||||
headers: {
|
||||
Referrer: referrer,
|
||||
},
|
||||
])(
|
||||
'should not suppress $name',
|
||||
({ name, path: testPath, referrer, expectStatus, shouldCallNext }) => {
|
||||
test(name, async () => {
|
||||
const req = mockRequest(testPath, {
|
||||
headers: {
|
||||
Referrer: referrer,
|
||||
},
|
||||
})
|
||||
const res = mockResponse()
|
||||
let nexted = false
|
||||
const next = () => {
|
||||
nexted = true
|
||||
}
|
||||
setDefaultFastlySurrogateKey(req, res, () => {})
|
||||
await archivedEnterpriseVersionsAssets(req as any, res as any, next)
|
||||
expect(nexted).toBe(shouldCallNext)
|
||||
expect(res.statusCode).toBe(expectStatus)
|
||||
})
|
||||
const res = mockResponse()
|
||||
let nexted = false
|
||||
const next = () => {
|
||||
nexted = true
|
||||
}
|
||||
setDefaultFastlySurrogateKey(req, res, () => {})
|
||||
await archivedEnterpriseVersionsAssets(req as any, res as any, next)
|
||||
expect(nexted).toBe(shouldCallNext)
|
||||
expect(res.statusCode).toBe(expectStatus)
|
||||
})
|
||||
})
|
||||
},
|
||||
)
|
||||
})
|
||||
|
||||
@@ -83,8 +83,8 @@ async function setupEnvironment() {
|
||||
|
||||
// copy the raw rst files to the temp directory and convert them
|
||||
// to Markdownusing pandoc
|
||||
async function rstToMarkdown(sourceDirectory: string) {
|
||||
const sourceFiles = walk(sourceDirectory, {
|
||||
async function rstToMarkdown(rstSourceDirectory: string) {
|
||||
const sourceFiles = walk(rstSourceDirectory, {
|
||||
includeBasePath: true,
|
||||
globs: ['**/*.rst'],
|
||||
})
|
||||
|
||||
@@ -80,15 +80,15 @@ export async function getLintableYml(dataFilePath: string): Promise<Record<strin
|
||||
// back to a file in the data directory.
|
||||
// The resulting key looks like:
|
||||
// 'data/variables/product.yml /pat_v1_caps'
|
||||
function addPathToKey(mdDict: Map<string, string>, dataFilePath: string): Map<string, string> {
|
||||
const keys = Array.from(mdDict.keys())
|
||||
function addPathToKey(mdDictMap: Map<string, string>, dataFilePath: string): Map<string, string> {
|
||||
const keys = Array.from(mdDictMap.keys())
|
||||
keys.forEach((key) => {
|
||||
const newKey = `${dataFilePath} ${key}`
|
||||
const value = mdDict.get(key)
|
||||
const value = mdDictMap.get(key)
|
||||
if (value !== undefined) {
|
||||
mdDict.delete(key)
|
||||
mdDict.set(newKey, value)
|
||||
mdDictMap.delete(key)
|
||||
mdDictMap.set(newKey, value)
|
||||
}
|
||||
})
|
||||
return mdDict
|
||||
return mdDictMap
|
||||
}
|
||||
|
||||
@@ -45,7 +45,7 @@ export const frontmatterHeroImage: Rule = {
|
||||
|
||||
// Check if heroImage is an absolute path
|
||||
if (!heroImage.startsWith('/')) {
|
||||
const line = params.lines.find((line: string) => line.trim().startsWith('heroImage:'))
|
||||
const line = params.lines.find((ln: string) => ln.trim().startsWith('heroImage:'))
|
||||
const lineNumber = line ? params.lines.indexOf(line) + 1 : 1
|
||||
addError(
|
||||
onError,
|
||||
@@ -59,7 +59,7 @@ export const frontmatterHeroImage: Rule = {
|
||||
|
||||
// Check if heroImage points to banner-images directory
|
||||
if (!heroImage.startsWith('/assets/images/banner-images/')) {
|
||||
const line = params.lines.find((line: string) => line.trim().startsWith('heroImage:'))
|
||||
const line = params.lines.find((ln: string) => ln.trim().startsWith('heroImage:'))
|
||||
const lineNumber = line ? params.lines.indexOf(line) + 1 : 1
|
||||
addError(
|
||||
onError,
|
||||
@@ -74,7 +74,7 @@ export const frontmatterHeroImage: Rule = {
|
||||
// Check if the file actually exists
|
||||
const validHeroImages = getValidHeroImages()
|
||||
if (validHeroImages.length > 0 && !validHeroImages.includes(heroImage)) {
|
||||
const line = params.lines.find((line: string) => line.trim().startsWith('heroImage:'))
|
||||
const line = params.lines.find((ln: string) => ln.trim().startsWith('heroImage:'))
|
||||
const lineNumber = line ? params.lines.indexOf(line) + 1 : 1
|
||||
const availableImages = validHeroImages.join(', ')
|
||||
addError(
|
||||
|
||||
@@ -48,8 +48,8 @@ export const frontmatterIntroLinks: Rule = {
|
||||
for (const key of Object.keys(introLinks)) {
|
||||
if (!validKeys.includes(key)) {
|
||||
// Find the line with this key
|
||||
const line = params.lines.find((line: string) => {
|
||||
const trimmed = line.trim()
|
||||
const line = params.lines.find((ln: string) => {
|
||||
const trimmed = ln.trim()
|
||||
return trimmed.startsWith(`${key}:`) && !trimmed.startsWith('introLinks:')
|
||||
})
|
||||
const lineNumber = line ? params.lines.indexOf(line) + 1 : 1
|
||||
|
||||
@@ -24,7 +24,7 @@ export const frontmatterSchema: Rule = {
|
||||
for (const key of deprecatedKeys) {
|
||||
// Early access articles are allowed to have deprecated properties
|
||||
if (params.name.includes('early-access')) continue
|
||||
const line = params.lines.find((line: string) => line.trim().startsWith(key))
|
||||
const line = params.lines.find((ln: string) => ln.trim().startsWith(key))
|
||||
const lineNumber = params.lines.indexOf(line!) + 1
|
||||
addError(
|
||||
onError,
|
||||
|
||||
@@ -262,7 +262,7 @@ async function main() {
|
||||
}
|
||||
|
||||
const fixableFiles = Object.entries(formattedResults)
|
||||
.filter(([, results]) => results.some((result) => result.fixable))
|
||||
.filter(([, fileResults]) => fileResults.some((flaw) => flaw.fixable))
|
||||
.map(([file]) => file)
|
||||
if (fixableFiles.length) {
|
||||
console.log('') // Just for some whitespace before the next message
|
||||
@@ -302,7 +302,7 @@ function pluralize(things, word, pluralForm = null) {
|
||||
// (e.g., heading linters) so we need to separate the
|
||||
// list of data files from all other files to run
|
||||
// through markdownlint individually
|
||||
function getFilesToLint(paths) {
|
||||
function getFilesToLint(inputPaths) {
|
||||
const fileList = {
|
||||
length: 0,
|
||||
content: [],
|
||||
@@ -316,7 +316,7 @@ function getFilesToLint(paths) {
|
||||
// The path passed to Markdownlint is what is displayed
|
||||
// in the error report, so we want to normalize it and
|
||||
// and make it relative if it's absolute.
|
||||
for (const rawPath of paths) {
|
||||
for (const rawPath of inputPaths) {
|
||||
const absPath = path.resolve(rawPath)
|
||||
if (fs.statSync(rawPath).isDirectory()) {
|
||||
if (isInDir(absPath, contentDir)) {
|
||||
@@ -427,16 +427,16 @@ function reportSummaryByRule(results, config) {
|
||||
result. Results are sorted by severity per file, with errors
|
||||
listed first then warnings.
|
||||
*/
|
||||
function getFormattedResults(allResults, isPrecommit) {
|
||||
function getFormattedResults(allResults, isInPrecommitMode) {
|
||||
const output = {}
|
||||
Object.entries(allResults)
|
||||
// Each result key always has an array value, but it may be empty
|
||||
.filter(([, results]) => results.length)
|
||||
.forEach(([key, results]) => {
|
||||
.forEach(([key, fileResults]) => {
|
||||
if (verbose) {
|
||||
output[key] = [...results]
|
||||
output[key] = [...fileResults]
|
||||
} else {
|
||||
const formattedResults = results.map((flaw) => formatResult(flaw, isPrecommit))
|
||||
const formattedResults = fileResults.map((flaw) => formatResult(flaw, isInPrecommitMode))
|
||||
|
||||
// Only add the file to output if there are results after filtering
|
||||
if (formattedResults.length > 0) {
|
||||
@@ -465,8 +465,8 @@ function getErrorCountByFile(results, fixed = false) {
|
||||
return getCountBySeverity(results, 'error', fixed)
|
||||
}
|
||||
function getCountBySeverity(results, severityLookup, fixed) {
|
||||
return Object.values(results).filter((results) =>
|
||||
results.some((result) => {
|
||||
return Object.values(results).filter((fileResults) =>
|
||||
fileResults.some((result) => {
|
||||
// If --fix was applied, we don't want to know about files that
|
||||
// no longer have errors or warnings.
|
||||
return result.severity === severityLookup && (!fixed || !result.fixable)
|
||||
@@ -477,7 +477,7 @@ function getCountBySeverity(results, severityLookup, fixed) {
|
||||
// Removes null values and properties that are not relevant to content
|
||||
// writers, adds the severity to each result object, and transforms
|
||||
// some error and fix data into a more readable format.
|
||||
function formatResult(object, isPrecommit) {
|
||||
function formatResult(object, isInPrecommitMode) {
|
||||
const formattedResult = {}
|
||||
|
||||
// Add severity to each result object
|
||||
@@ -486,7 +486,8 @@ function formatResult(object, isPrecommit) {
|
||||
throw new Error(`Rule not found in allConfig: '${ruleName}'`)
|
||||
}
|
||||
formattedResult.severity =
|
||||
allConfig[ruleName].severity || getSearchReplaceRuleSeverity(ruleName, object, isPrecommit)
|
||||
allConfig[ruleName].severity ||
|
||||
getSearchReplaceRuleSeverity(ruleName, object, isInPrecommitMode)
|
||||
|
||||
formattedResult.context = allConfig[ruleName].context || ''
|
||||
|
||||
@@ -540,7 +541,7 @@ function listRules() {
|
||||
Rules that can't be run on partials have the property
|
||||
`partial-markdown-files` set to false.
|
||||
*/
|
||||
function getMarkdownLintConfig(errorsOnly, runRules) {
|
||||
function getMarkdownLintConfig(filterErrorsOnly, runRules) {
|
||||
const config = {
|
||||
content: structuredClone(defaultConfig),
|
||||
data: structuredClone(defaultConfig),
|
||||
@@ -559,7 +560,7 @@ function getMarkdownLintConfig(errorsOnly, runRules) {
|
||||
// search-replace is handled differently than other rules because
|
||||
// it has nested metadata and rules.
|
||||
if (
|
||||
errorsOnly &&
|
||||
filterErrorsOnly &&
|
||||
getRuleSeverity(ruleConfig, isPrecommit) !== 'error' &&
|
||||
ruleName !== 'search-replace'
|
||||
) {
|
||||
@@ -585,7 +586,7 @@ function getMarkdownLintConfig(errorsOnly, runRules) {
|
||||
|
||||
for (const searchRule of ruleConfig.rules) {
|
||||
const searchRuleSeverity = getRuleSeverity(searchRule, isPrecommit)
|
||||
if (errorsOnly && searchRuleSeverity !== 'error') continue
|
||||
if (filterErrorsOnly && searchRuleSeverity !== 'error') continue
|
||||
// Add search-replace rules to frontmatter configuration for rules that make sense in frontmatter
|
||||
// This ensures rules like TODOCS detection work in frontmatter
|
||||
// Rules with applyToFrontmatter should ONLY run in the frontmatter pass (which lints the entire file)
|
||||
@@ -640,14 +641,16 @@ function getMarkdownLintConfig(errorsOnly, runRules) {
|
||||
// Return the severity value of a rule but keep in mind it could be
|
||||
// running as a precommit hook, which means the severity could be
|
||||
// deliberately different.
|
||||
function getRuleSeverity(rule, isPrecommit) {
|
||||
return isPrecommit ? rule.precommitSeverity || rule.severity : rule.severity
|
||||
function getRuleSeverity(ruleConfig, isInPrecommitMode) {
|
||||
return isInPrecommitMode
|
||||
? ruleConfig.precommitSeverity || ruleConfig.severity
|
||||
: ruleConfig.severity
|
||||
}
|
||||
|
||||
// Gets a custom rule function from the name of the rule
|
||||
// in the configuration file
|
||||
function getCustomRule(ruleName) {
|
||||
const rule = customRules.find((rule) => rule.names.includes(ruleName))
|
||||
const rule = customRules.find((r) => r.names.includes(ruleName))
|
||||
if (!rule)
|
||||
throw new Error(
|
||||
`A content-lint rule ('${ruleName}') is configured in the markdownlint config file but does not have a corresponding rule function.`,
|
||||
@@ -696,24 +699,24 @@ export function shouldIncludeRule(ruleName, runRules) {
|
||||
fixInfo: null
|
||||
}
|
||||
*/
|
||||
function getSearchReplaceRuleSeverity(ruleName, object, isPrecommit) {
|
||||
function getSearchReplaceRuleSeverity(ruleName, object, isInPrecommitMode) {
|
||||
const pluginRuleName = object.errorDetail.split(':')[0].trim()
|
||||
const rule = allConfig[ruleName].rules.find((rule) => rule.name === pluginRuleName)
|
||||
return isPrecommit ? rule.precommitSeverity || rule.severity : rule.severity
|
||||
const rule = allConfig[ruleName].rules.find((r) => r.name === pluginRuleName)
|
||||
return isInPrecommitMode ? rule.precommitSeverity || rule.severity : rule.severity
|
||||
}
|
||||
|
||||
function isOptionsValid() {
|
||||
// paths should only contain existing files and directories
|
||||
const paths = program.opts().paths || []
|
||||
for (const path of paths) {
|
||||
const optionPaths = program.opts().paths || []
|
||||
for (const filePath of optionPaths) {
|
||||
try {
|
||||
fs.statSync(path)
|
||||
fs.statSync(filePath)
|
||||
} catch {
|
||||
if ('paths'.includes(path)) {
|
||||
if ('paths'.includes(filePath)) {
|
||||
console.log('error: did you mean --paths')
|
||||
} else {
|
||||
console.log(
|
||||
`error: invalid --paths (-p) option. The value '${path}' is not a valid file or directory`,
|
||||
`error: invalid --paths (-p) option. The value '${filePath}' is not a valid file or directory`,
|
||||
)
|
||||
}
|
||||
return false
|
||||
@@ -722,14 +725,14 @@ function isOptionsValid() {
|
||||
|
||||
// rules should only contain existing, correctly spelled rules
|
||||
const allRulesList = [...allRules.map((rule) => rule.names).flat(), ...Object.keys(allConfig)]
|
||||
const rules = program.opts().rules || []
|
||||
for (const rule of rules) {
|
||||
if (!allRulesList.includes(rule)) {
|
||||
if ('rules'.includes(rule)) {
|
||||
const optionRules = program.opts().rules || []
|
||||
for (const ruleName of optionRules) {
|
||||
if (!allRulesList.includes(ruleName)) {
|
||||
if ('rules'.includes(ruleName)) {
|
||||
console.log('error: did you mean --rules')
|
||||
} else {
|
||||
console.log(
|
||||
`error: invalid --rules (-r) option. The value '${rule}' is not a valid rule name.`,
|
||||
`error: invalid --rules (-r) option. The value '${ruleName}' is not a valid rule name.`,
|
||||
)
|
||||
}
|
||||
return false
|
||||
|
||||
@@ -53,10 +53,10 @@ export function prettyPrintResults(
|
||||
let ruleDescription = ''
|
||||
|
||||
const errorDetailsByDescription = new Map()
|
||||
for (const { errorDetail, ruleDescription } of sorted) {
|
||||
const details = errorDetailsByDescription.get(ruleDescription) || new Set()
|
||||
for (const { errorDetail, ruleDescription: ruleDesc } of sorted) {
|
||||
const details = errorDetailsByDescription.get(ruleDesc) || new Set()
|
||||
details.add(errorDetail)
|
||||
errorDetailsByDescription.set(ruleDescription, details)
|
||||
errorDetailsByDescription.set(ruleDesc, details)
|
||||
}
|
||||
|
||||
for (const result of sorted) {
|
||||
|
||||
@@ -98,10 +98,10 @@ describe.skip('category pages', () => {
|
||||
const indexContents = await fs.promises.readFile(indexAbsPath, 'utf8')
|
||||
const parsed = matter(indexContents)
|
||||
if (!parsed.data) throw new Error('No frontmatter')
|
||||
const data = parsed.data as MarkdownFrontmatter
|
||||
categoryVersions = getApplicableVersions(data.versions, indexAbsPath)
|
||||
allowTitleToDifferFromFilename = data.allowTitleToDifferFromFilename
|
||||
const articleLinks = data.children.filter((child) => {
|
||||
const categoryData = parsed.data as MarkdownFrontmatter
|
||||
categoryVersions = getApplicableVersions(categoryData.versions, indexAbsPath)
|
||||
allowTitleToDifferFromFilename = categoryData.allowTitleToDifferFromFilename
|
||||
const articleLinks = categoryData.children.filter((child) => {
|
||||
const mdPath = getPath(productDir, indexLink, child)
|
||||
const fileExists = fs.existsSync(mdPath)
|
||||
return fileExists && fs.statSync(mdPath).isFile()
|
||||
@@ -137,10 +137,10 @@ describe.skip('category pages', () => {
|
||||
articleLinks.map(async (articleLink) => {
|
||||
const articlePath = getPath(productDir, indexLink, articleLink)
|
||||
const articleContents = await fs.promises.readFile(articlePath, 'utf8')
|
||||
const data = getFrontmatterData(articleContents)
|
||||
const articleData = getFrontmatterData(articleContents)
|
||||
|
||||
// Do not include subcategories in list of published articles
|
||||
if (data.subcategory || data.hidden) return null
|
||||
if (articleData.subcategory || articleData.hidden) return null
|
||||
|
||||
// ".../content/github/{category}/{article}.md" => "/{article}"
|
||||
return `/${path.relative(categoryDir, articlePath).replace(/\.md$/, '')}`
|
||||
@@ -159,10 +159,10 @@ describe.skip('category pages', () => {
|
||||
await Promise.all(
|
||||
childFilePaths.map(async (articlePath) => {
|
||||
const articleContents = await fs.promises.readFile(articlePath, 'utf8')
|
||||
const data = getFrontmatterData(articleContents)
|
||||
const availableArticleData = getFrontmatterData(articleContents)
|
||||
|
||||
// Do not include subcategories nor hidden pages in list of available articles
|
||||
if (data.subcategory || data.hidden) return null
|
||||
if (availableArticleData.subcategory || availableArticleData.hidden) return null
|
||||
|
||||
// ".../content/github/{category}/{article}.md" => "/{article}"
|
||||
return `/${path.relative(categoryDir, articlePath).replace(/\.md$/, '')}`
|
||||
@@ -173,10 +173,10 @@ describe.skip('category pages', () => {
|
||||
await Promise.all(
|
||||
childFilePaths.map(async (articlePath) => {
|
||||
const articleContents = await fs.promises.readFile(articlePath, 'utf8')
|
||||
const data = getFrontmatterData(articleContents)
|
||||
const versionData = getFrontmatterData(articleContents)
|
||||
|
||||
articleVersions[articlePath] = getApplicableVersions(
|
||||
data.versions,
|
||||
versionData.versions,
|
||||
articlePath,
|
||||
) as string[]
|
||||
}),
|
||||
@@ -196,8 +196,8 @@ describe.skip('category pages', () => {
|
||||
})
|
||||
|
||||
test('contains only articles and subcategories with versions that are also available in the parent category', () => {
|
||||
Object.entries(articleVersions).forEach(([articleName, articleVersions]) => {
|
||||
const unexpectedVersions = difference(articleVersions, categoryVersions)
|
||||
Object.entries(articleVersions).forEach(([articleName, versions]) => {
|
||||
const unexpectedVersions = difference(versions, categoryVersions)
|
||||
const errorMessage = `${articleName} has versions that are not available in parent category`
|
||||
expect(unexpectedVersions.length, errorMessage).toBe(0)
|
||||
})
|
||||
|
||||
@@ -24,10 +24,10 @@ describe('front matter', () => {
|
||||
}
|
||||
// Using any type because trouble array contains objects with varying error properties
|
||||
const nonWarnings = trouble.filter((t: any) => !t.warning)
|
||||
for (const { uri, index, redirects } of nonWarnings) {
|
||||
for (const { uri, index, redirects: redirectTo } of nonWarnings) {
|
||||
customErrorMessage += `\nindex: ${index} URI: ${uri}`
|
||||
if (redirects) {
|
||||
customErrorMessage += `\n\tredirects to ${redirects}`
|
||||
if (redirectTo) {
|
||||
customErrorMessage += `\n\tredirects to ${redirectTo}`
|
||||
} else {
|
||||
customErrorMessage += '\tPage not found'
|
||||
}
|
||||
|
||||
@@ -88,7 +88,7 @@ async function main() {
|
||||
console.log(`\nUpdated ${updatedCount} files out of ${processedCount}`)
|
||||
}
|
||||
|
||||
function processFile(filePath: string, options: ScriptOptions) {
|
||||
function processFile(filePath: string, scriptOptions: ScriptOptions) {
|
||||
const fileContent = fs.readFileSync(filePath, 'utf8')
|
||||
const relativePath = path.relative(contentDir, filePath)
|
||||
|
||||
@@ -100,11 +100,11 @@ function processFile(filePath: string, options: ScriptOptions) {
|
||||
if (!data) return { processed: false, updated: false }
|
||||
|
||||
// Remove the legacy type property if option is passed
|
||||
const removeLegacyType = Boolean(options.removeType && data.type)
|
||||
const removeLegacyType = Boolean(scriptOptions.removeType && data.type)
|
||||
|
||||
const newContentType = determineContentType(relativePath, data.type || '')
|
||||
|
||||
if (options.dryRun) {
|
||||
if (scriptOptions.dryRun) {
|
||||
console.log(`\n${relativePath}`)
|
||||
if (!data.contentType) {
|
||||
console.log(` ✅ Would set contentType: "${newContentType}"`)
|
||||
@@ -144,7 +144,7 @@ function processFile(filePath: string, options: ScriptOptions) {
|
||||
// Write the file back
|
||||
fs.writeFileSync(filePath, frontmatter.stringify(content, data, { lineWidth: -1 } as any))
|
||||
|
||||
if (options.verbose) {
|
||||
if (scriptOptions.verbose) {
|
||||
console.log(`\n${relativePath}`)
|
||||
console.log(` ✅ Set contentType: "${newContentType}"`)
|
||||
if (removeLegacyType) {
|
||||
|
||||
@@ -115,10 +115,10 @@ async function main(options: Options) {
|
||||
|
||||
const toJson: AllDocument[] = []
|
||||
for (const doc of documents) {
|
||||
const { documents, ...rest } = doc
|
||||
const { documents: docDocuments, ...rest } = doc
|
||||
toJson.push({
|
||||
...rest,
|
||||
documents,
|
||||
documents: docDocuments,
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ export async function allDocuments(options: Options): Promise<AllDocument[]> {
|
||||
|
||||
const site = await warmServer(options.languages)
|
||||
const pages: Page[] = site.pageList
|
||||
const allDocuments: AllDocument[] = []
|
||||
const allDocumentsResult: AllDocument[] = []
|
||||
|
||||
type ByVersion = Map<string, Document[]>
|
||||
const byLanguage = new Map<string, ByVersion>()
|
||||
@@ -96,8 +96,8 @@ export async function allDocuments(options: Options): Promise<AllDocument[]> {
|
||||
}
|
||||
for (const [language, byVersion] of byLanguage) {
|
||||
for (const [version, documents] of byVersion) {
|
||||
allDocuments.push({ version, language, documents })
|
||||
allDocumentsResult.push({ version, language, documents })
|
||||
}
|
||||
}
|
||||
return allDocuments
|
||||
return allDocumentsResult
|
||||
}
|
||||
|
||||
@@ -400,11 +400,11 @@ function addToChildren(newPath, positions, opts) {
|
||||
}
|
||||
|
||||
if (CHILDGROUPS_KEY in data) {
|
||||
for (const [groupIndex, childrenPosition] of childGroupPositions) {
|
||||
for (const [groupIndex, groupChildPosition] of childGroupPositions) {
|
||||
if (groupIndex < data[CHILDGROUPS_KEY].length) {
|
||||
const group = data[CHILDGROUPS_KEY][groupIndex]
|
||||
if (childrenPosition < group.children.length) {
|
||||
group.children.splice(childrenPosition, 0, newName)
|
||||
if (groupChildPosition < group.children.length) {
|
||||
group.children.splice(groupChildPosition, 0, newName)
|
||||
} else {
|
||||
group.children.push(newName)
|
||||
}
|
||||
|
||||
@@ -35,11 +35,12 @@ for (const page of pages) {
|
||||
fs.mkdirSync(`${contentCopilotDir}/${dirnames}`, { recursive: true })
|
||||
// Context needed to render the content liquid
|
||||
const req = { language: 'en' } as ExtendedRequest
|
||||
const contextualize = (req: ExtendedRequest): void => {
|
||||
if (!req.context) return
|
||||
if (!req.context.currentVersion) return
|
||||
req.context.currentVersionObj = req.context.allVersions?.[req.context.currentVersion]
|
||||
shortVersionsMiddleware(req, null, () => {})
|
||||
const contextualize = (request: ExtendedRequest): void => {
|
||||
if (!request.context) return
|
||||
if (!request.context.currentVersion) return
|
||||
request.context.currentVersionObj =
|
||||
request.context.allVersions?.[request.context.currentVersion]
|
||||
shortVersionsMiddleware(request, null, () => {})
|
||||
}
|
||||
|
||||
req.context = {
|
||||
|
||||
@@ -29,10 +29,11 @@ async function main(nameTuple: [string, string]) {
|
||||
if (data) assert(data.redirect_from.includes(oldHref), `Redirect from ${oldHref} not found`)
|
||||
{
|
||||
const parentIndexMd = path.join(path.dirname(after), 'index.md')
|
||||
const fileContent = fs.readFileSync(parentIndexMd, 'utf-8')
|
||||
const { data } = readFrontmatter(fileContent)
|
||||
const parentFileContent = fs.readFileSync(parentIndexMd, 'utf-8')
|
||||
const { data: parentData } = readFrontmatter(parentFileContent)
|
||||
const afterShortname = `/${after.split('/').slice(-1)[0].replace(/\.md$/, '')}`
|
||||
if (data) assert(data.children.includes(afterShortname), `Child ${afterShortname} not found`)
|
||||
if (parentData)
|
||||
assert(parentData.children.includes(afterShortname), `Child ${afterShortname} not found`)
|
||||
}
|
||||
} else {
|
||||
const fileContent = fs.readFileSync(path.join(after, 'index.md'), 'utf-8')
|
||||
@@ -41,10 +42,11 @@ async function main(nameTuple: [string, string]) {
|
||||
if (data) assert(data.redirect_from.includes(oldHref), `Redirect from ${oldHref} not found`)
|
||||
{
|
||||
const parentIndexMd = path.join(path.dirname(after), 'index.md')
|
||||
const fileContent = fs.readFileSync(parentIndexMd, 'utf-8')
|
||||
const { data } = readFrontmatter(fileContent)
|
||||
const parentFileContent = fs.readFileSync(parentIndexMd, 'utf-8')
|
||||
const { data: parentData } = readFrontmatter(parentFileContent)
|
||||
const afterShortname = `/${after.split('/').slice(-1)}`
|
||||
if (data) assert(data.children.includes(afterShortname), `Child ${afterShortname} not found`)
|
||||
if (parentData)
|
||||
assert(parentData.children.includes(afterShortname), `Child ${afterShortname} not found`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -96,7 +96,7 @@ async function main(): Promise<void> {
|
||||
async function processFile(
|
||||
file: string,
|
||||
slugger: GithubSlugger,
|
||||
options: ScriptOptions,
|
||||
scriptOptions: ScriptOptions,
|
||||
): Promise<string[] | null> {
|
||||
const { data } = frontmatter(fs.readFileSync(file, 'utf8')) as unknown as {
|
||||
data: PageFrontmatter
|
||||
@@ -105,7 +105,7 @@ async function processFile(
|
||||
const isDirectory = isDirectoryCheck(file)
|
||||
|
||||
// Assess the frontmatter and other conditions to determine if we want to process the path.
|
||||
const processPage: boolean = determineProcessStatus(data, isDirectory, options)
|
||||
const processPage: boolean = determineProcessStatus(data, isDirectory, scriptOptions)
|
||||
if (!processPage) return null
|
||||
|
||||
let stringToSlugify: string = data.shortTitle || data.title
|
||||
@@ -153,10 +153,10 @@ async function processFile(
|
||||
return [contentPath, newContentPath]
|
||||
}
|
||||
|
||||
function moveFile(result: string[], options: ScriptOptions): void {
|
||||
function moveFile(result: string[], scriptOptions: ScriptOptions): void {
|
||||
const [contentPath, newContentPath] = result
|
||||
|
||||
if (options.dryRun) {
|
||||
if (scriptOptions.dryRun) {
|
||||
console.log('Move:\n', contentPath, '\nto:\n', newContentPath, '\n')
|
||||
return
|
||||
}
|
||||
@@ -214,7 +214,7 @@ function sortFiles(filesArray: string[]): string[] {
|
||||
})
|
||||
}
|
||||
|
||||
function filterFiles(contentDir: string, options: ScriptOptions) {
|
||||
function filterFiles(contentDir: string, scriptOptions: ScriptOptions) {
|
||||
return walkFiles(contentDir, ['.md']).filter((file: string) => {
|
||||
// Never move readmes
|
||||
if (file.endsWith('README.md')) return false
|
||||
@@ -226,9 +226,9 @@ function filterFiles(contentDir: string, options: ScriptOptions) {
|
||||
if (path.relative(contentDir, file).split(path.sep)[1] === 'index.md') return false
|
||||
|
||||
// If no specific paths are passed, we are done filtering.
|
||||
if (!options.paths) return true
|
||||
if (!scriptOptions.paths) return true
|
||||
|
||||
return options.paths.some((p: string) => {
|
||||
return scriptOptions.paths.some((p: string) => {
|
||||
// Allow either a full content path like "content/foo/bar.md"
|
||||
// or a top-level directory name like "copilot"
|
||||
if (!p.startsWith('content')) {
|
||||
@@ -247,15 +247,15 @@ function filterFiles(contentDir: string, options: ScriptOptions) {
|
||||
function determineProcessStatus(
|
||||
data: PageFrontmatter,
|
||||
isDirectory: boolean,
|
||||
options: ScriptOptions,
|
||||
scriptOptions: ScriptOptions,
|
||||
): boolean {
|
||||
// Assess the conditions in this order:
|
||||
// If it's a directory AND we're excluding dirs, do not process it no matter what.
|
||||
if (isDirectory && options.excludeDirs) {
|
||||
if (isDirectory && scriptOptions.excludeDirs) {
|
||||
return false
|
||||
}
|
||||
// If the force option is passed, process it no matter what.
|
||||
if (options.force) {
|
||||
if (scriptOptions.force) {
|
||||
return true
|
||||
}
|
||||
// If the page has the override set, do not process it.
|
||||
|
||||
@@ -66,7 +66,7 @@ describe('annotate', () => {
|
||||
})
|
||||
|
||||
test('renders bash with hash bang annotations', async () => {
|
||||
const example = `
|
||||
const bashExample = `
|
||||
\`\`\`bash annotate
|
||||
# The next line is the hash bang
|
||||
#!/usr/bin/env bash
|
||||
@@ -75,11 +75,11 @@ describe('annotate', () => {
|
||||
echo "Hello, world!"
|
||||
\`\`\`
|
||||
`.trim()
|
||||
const res = await renderContent(example)
|
||||
const res = await renderContent(bashExample)
|
||||
const $ = cheerio.load(res)
|
||||
|
||||
const headerCode = $('header pre').text()
|
||||
expect(headerCode).toMatch(example.split('\n').slice(1, -1).join('\n'))
|
||||
expect(headerCode).toMatch(bashExample.split('\n').slice(1, -1).join('\n'))
|
||||
const rows = $('.annotate-row')
|
||||
const notes = $('.annotate-note p', rows)
|
||||
const noteTexts = notes.map((i, el) => $(el).text()).get()
|
||||
@@ -90,7 +90,7 @@ echo "Hello, world!"
|
||||
})
|
||||
|
||||
test("doesn't complain if the first comment is empty", async () => {
|
||||
const example = `
|
||||
const emptyCommentExample = `
|
||||
\`\`\`yaml annotate copy
|
||||
#
|
||||
name: Create and publish a Docker image
|
||||
@@ -103,11 +103,11 @@ on:
|
||||
\`\`\`
|
||||
`.trim()
|
||||
|
||||
const res = await renderContent(example)
|
||||
const res = await renderContent(emptyCommentExample)
|
||||
const $ = cheerio.load(res)
|
||||
|
||||
const headerCode = $('header pre').text()
|
||||
expect(headerCode).toMatch(example.split('\n').slice(1, -1).join('\n'))
|
||||
expect(headerCode).toMatch(emptyCommentExample.split('\n').slice(1, -1).join('\n'))
|
||||
const rows = $('.annotate-row')
|
||||
const notes = $('.annotate-note p', rows)
|
||||
const noteTexts = notes.map((i, el) => $(el).text()).get()
|
||||
@@ -121,7 +121,7 @@ on:
|
||||
})
|
||||
|
||||
test('supports AUTOTITLE links in annotations', async () => {
|
||||
const example = `
|
||||
const autotitleExample = `
|
||||
\`\`\`yaml annotate copy
|
||||
# For more information about workflow syntax, see [AUTOTITLE](/get-started/start-your-journey/hello-world).
|
||||
name: Test workflow
|
||||
@@ -151,7 +151,7 @@ on: [push]
|
||||
// Mock test object doesn't need all Context properties, using 'as unknown as' to bypass strict type checking
|
||||
} as unknown as Context
|
||||
|
||||
const res = await renderContent(example, mockContext)
|
||||
const res = await renderContent(autotitleExample, mockContext)
|
||||
const $ = cheerio.load(res)
|
||||
|
||||
const rows = $('.annotate-row')
|
||||
|
||||
@@ -122,8 +122,8 @@ function createAnnotatedNode(node: ElementNode, context: any): any {
|
||||
const rows = chunk(groups, 2)
|
||||
|
||||
// Check the rows are formatted correctly
|
||||
for (const [note, code] of rows) {
|
||||
if (note === undefined || code === undefined) {
|
||||
for (const [note, codeBlock] of rows) {
|
||||
if (note === undefined || codeBlock === undefined) {
|
||||
throw new Error(
|
||||
"Each annotation must have a note and a code block. If you're trying to create a blank annotation, you can use a single line comment with a space after it.",
|
||||
)
|
||||
@@ -231,13 +231,13 @@ function template({
|
||||
h(
|
||||
'div',
|
||||
{ className: 'annotate-beside' },
|
||||
rows.map(([note, code]) =>
|
||||
rows.map(([note, codeBlock]) =>
|
||||
h('div', { className: 'annotate-row' }, [
|
||||
h(
|
||||
'div',
|
||||
{ className: 'annotate-code' },
|
||||
// pre > code matches the mdast -> hast tree of a regular fenced code block.
|
||||
h('pre', h('code', { className: `language-${lang}` }, code.join('\n'))),
|
||||
h('pre', h('code', { className: `language-${lang}` }, codeBlock.join('\n'))),
|
||||
),
|
||||
h(
|
||||
'div',
|
||||
|
||||
@@ -108,8 +108,8 @@ function btnIcon(): Element {
|
||||
const btnIconHtml: string = octicons.copy.toSVG()
|
||||
const btnIconAst = parse(String(btnIconHtml), { sourceCodeLocationInfo: true })
|
||||
// @ts-ignore - fromParse5 file option typing issue
|
||||
const btnIcon = fromParse5(btnIconAst, { file: btnIconHtml })
|
||||
return btnIcon as Element
|
||||
const btnIconElement = fromParse5(btnIconAst, { file: btnIconHtml })
|
||||
return btnIconElement as Element
|
||||
}
|
||||
|
||||
// Using any due to conflicting unist/hast type definitions between dependencies
|
||||
|
||||
@@ -85,6 +85,6 @@ function findMatchingCode(ref: string, tree: any): any {
|
||||
function copilotIcon(): any {
|
||||
const copilotIconHtml = octicons.copilot.toSVG()
|
||||
const copilotIconAst = parse(String(copilotIconHtml), { sourceCodeLocationInfo: true })
|
||||
const copilotIcon = fromParse5(copilotIconAst, { file: copilotIconHtml })
|
||||
return copilotIcon
|
||||
const copilotIconElement = fromParse5(copilotIconAst, { file: copilotIconHtml })
|
||||
return copilotIconElement
|
||||
}
|
||||
|
||||
@@ -185,14 +185,14 @@ function processLinkNode(node: Link, language: string, version: string, nodes: N
|
||||
language === 'en'
|
||||
) {
|
||||
// Throw if the link text *almost* is AUTOTITLE
|
||||
const textChild = child as Text
|
||||
const childText = child as Text
|
||||
if (
|
||||
textChild.value.toUpperCase() === 'AUTOTITLE' ||
|
||||
distance(textChild.value.toUpperCase(), 'AUTOTITLE') <= 2
|
||||
childText.value.toUpperCase() === 'AUTOTITLE' ||
|
||||
distance(childText.value.toUpperCase(), 'AUTOTITLE') <= 2
|
||||
) {
|
||||
throw new Error(
|
||||
`Found link text '${textChild.value}', expected 'AUTOTITLE'. ` +
|
||||
`Find the mention of the link text '${textChild.value}' and change it to 'AUTOTITLE'. Case matters.`,
|
||||
`Found link text '${childText.value}', expected 'AUTOTITLE'. ` +
|
||||
`Find the mention of the link text '${childText.value}' and change it to 'AUTOTITLE'. Case matters.`,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -177,23 +177,23 @@ function moveVariable(dataRef: string): void {
|
||||
const nonAltPath: string = newVariablePath.replace('-alt.yml', '.yml')
|
||||
const oldAltPath: string = oldVariablePath.replace('.yml', '-alt.yml')
|
||||
|
||||
let oldPath: string = oldVariablePath
|
||||
let oldVariableFinalPath: string = oldVariablePath
|
||||
|
||||
// If the old variable path doesn't exist, assume no migration needed.
|
||||
if (!fs.existsSync(oldVariablePath)) {
|
||||
if (!fs.existsSync(oldVariableFinalPath)) {
|
||||
if (!fs.existsSync(newVariablePath)) {
|
||||
console.log(`Problem migrating files for ${dataRef}`)
|
||||
return
|
||||
}
|
||||
if (fs.existsSync(oldAltPath)) {
|
||||
oldPath = oldAltPath
|
||||
oldVariableFinalPath = oldAltPath
|
||||
} else {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
const variableFileContent: Record<string, any> = yaml.load(
|
||||
fs.readFileSync(oldPath, 'utf8'),
|
||||
fs.readFileSync(oldVariableFinalPath, 'utf8'),
|
||||
) as Record<string, any>
|
||||
const value: any = variableFileContent[variableKey]
|
||||
|
||||
|
||||
@@ -59,10 +59,10 @@ export const Survey = () => {
|
||||
}
|
||||
}, [state])
|
||||
|
||||
function vote(vote: VoteState) {
|
||||
function vote(userVote: VoteState) {
|
||||
return () => {
|
||||
trackEvent(getEventData(vote === VoteState.YES))
|
||||
setVoteState(vote)
|
||||
trackEvent(getEventData(userVote === VoteState.YES))
|
||||
setVoteState(userVote)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -93,9 +93,9 @@ export const Survey = () => {
|
||||
setComment('')
|
||||
}
|
||||
|
||||
function getEventData(vote: boolean): EventData {
|
||||
function getEventData(voteValue: boolean): EventData {
|
||||
return {
|
||||
vote,
|
||||
vote: voteValue,
|
||||
comment,
|
||||
email,
|
||||
token,
|
||||
|
||||
@@ -46,13 +46,13 @@ export function useShouldShowExperiment(experimentKey: ExperimentNames | { key:
|
||||
|
||||
useEffect(() => {
|
||||
const updateShouldShow = async () => {
|
||||
const isStaff = await getIsStaff()
|
||||
const staffStatus = await getIsStaff()
|
||||
setShowExperiment(
|
||||
shouldShowExperiment(
|
||||
experimentKey,
|
||||
router.locale || '',
|
||||
mainContext.currentVersion || '',
|
||||
isStaff,
|
||||
staffStatus,
|
||||
router.query,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -48,17 +48,20 @@ export const SIGNAL_RATINGS = [
|
||||
{
|
||||
reduction: 0.2,
|
||||
name: 'not-language',
|
||||
validator: (comment: string, language: string) => isNotLanguage(comment, language),
|
||||
validator: (comment: string, commentLanguage: string) =>
|
||||
isNotLanguage(comment, commentLanguage),
|
||||
},
|
||||
{
|
||||
reduction: 0.3,
|
||||
name: 'cuss-words-likely',
|
||||
validator: (comment: string, language: string) => isLikelyCussWords(comment, language),
|
||||
validator: (comment: string, commentLanguage: string) =>
|
||||
isLikelyCussWords(comment, commentLanguage),
|
||||
},
|
||||
{
|
||||
reduction: 0.1,
|
||||
name: 'cuss-words-maybe',
|
||||
validator: (comment: string, language: string) => isMaybeCussWords(comment, language),
|
||||
validator: (comment: string, commentLanguage: string) =>
|
||||
isMaybeCussWords(comment, commentLanguage),
|
||||
},
|
||||
{
|
||||
reduction: 0.2,
|
||||
@@ -91,11 +94,11 @@ export async function getGuessedLanguage(comment: string) {
|
||||
return bestGuess.alpha2 || undefined
|
||||
}
|
||||
|
||||
export async function analyzeComment(text: string, language = 'en') {
|
||||
export async function analyzeComment(text: string, commentLanguage = 'en') {
|
||||
const signals = []
|
||||
let rating = 1.0
|
||||
for (const { reduction, name, validator } of SIGNAL_RATINGS) {
|
||||
if (validator(text, language)) {
|
||||
if (validator(text, commentLanguage)) {
|
||||
signals.push(name)
|
||||
rating -= reduction
|
||||
}
|
||||
|
||||
@@ -10,9 +10,9 @@ describe('formatErrors', () => {
|
||||
const { errors } = validateJson({ type: 'string' }, 0)
|
||||
const formattedErrors = formatErrors(errors || [], '')
|
||||
for (const formatted of formattedErrors) {
|
||||
const { isValid, errors } = validateJson(schemas.validation, formatted)
|
||||
const { isValid, errors: validationErrors } = validateJson(schemas.validation, formatted)
|
||||
if (!isValid) {
|
||||
throw new Error(errors?.map((e) => e.message).join(' -- '))
|
||||
throw new Error(validationErrors?.map((e) => e.message).join(' -- '))
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
@@ -27,11 +27,11 @@ async function alterExperimentsInPage(
|
||||
}
|
||||
for (const experiment of getActiveExperiments('all')) {
|
||||
await page.evaluate(
|
||||
({ experimentKey, variation }) => {
|
||||
({ experimentKey, variationType }) => {
|
||||
// @ts-expect-error overrideControlGroup is a custom function added to the window object
|
||||
window.overrideControlGroup(experimentKey, variation)
|
||||
window.overrideControlGroup(experimentKey, variationType)
|
||||
},
|
||||
{ experimentKey: experiment.key, variation },
|
||||
{ experimentKey: experiment.key, variationType: variation },
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -78,13 +78,13 @@ describe('post', () => {
|
||||
// Test what happens to `Cram{% ifversion fpt %}FPT{% endif %}ped.`
|
||||
// when it's not free-pro-team.
|
||||
{
|
||||
const $: cheerio.Root = await getDOM(
|
||||
const $inner: cheerio.Root = await getDOM(
|
||||
'/enterprise-server@latest/get-started/liquid/whitespace',
|
||||
)
|
||||
const html = $('#article-contents').html()
|
||||
const innerHtml = $inner('#article-contents').html()
|
||||
// Assures that there's not whitespace left when the `{% ifversion %}`
|
||||
// yields an empty string.
|
||||
expect(html).toMatch('Cramped')
|
||||
expect(innerHtml).toMatch('Cramped')
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
@@ -36,11 +36,11 @@ export const UtmPreserver = () => {
|
||||
}
|
||||
|
||||
// Add UTM parameters to a URL
|
||||
const addUtmParamsToUrl = (url: string, utmParams: URLSearchParams): string => {
|
||||
const addUtmParamsToUrl = (url: string, params: URLSearchParams): string => {
|
||||
try {
|
||||
const urlObj = new URL(url)
|
||||
|
||||
for (const [key, value] of utmParams) {
|
||||
for (const [key, value] of params) {
|
||||
urlObj.searchParams.set(key, value)
|
||||
}
|
||||
|
||||
|
||||
@@ -34,9 +34,9 @@ export default async function createTree(
|
||||
// wrong.
|
||||
try {
|
||||
mtime = await getMtime(filepath)
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
|
||||
throw error
|
||||
} catch (innerError) {
|
||||
if ((innerError as NodeJS.ErrnoException).code !== 'ENOENT') {
|
||||
throw innerError
|
||||
}
|
||||
// Throw an error if we can't find a content file associated with the children: entry.
|
||||
// But don't throw an error if the user is running the site locally and hasn't cloned the Early Access repo.
|
||||
|
||||
@@ -391,11 +391,11 @@ export const loadPages = loadPageList
|
||||
// Create an object from the list of all pages with permalinks as keys for fast lookup.
|
||||
export function createMapFromArray(pageList: Page[]): Record<string, Page> {
|
||||
const pageMap = pageList.reduce(
|
||||
(pageMap: Record<string, Page>, page: Page) => {
|
||||
(accumulatedMap: Record<string, Page>, page: Page) => {
|
||||
for (const permalink of page.permalinks) {
|
||||
pageMap[permalink.href] = page
|
||||
accumulatedMap[permalink.href] = page
|
||||
}
|
||||
return pageMap
|
||||
return accumulatedMap
|
||||
},
|
||||
{} as Record<string, Page>,
|
||||
)
|
||||
|
||||
@@ -58,12 +58,12 @@ export function readCompressedJsonFileFallbackLazily(xpath: string): () => any {
|
||||
if (err.code === 'ENOENT') {
|
||||
try {
|
||||
fs.accessSync(`${xpath}.br`)
|
||||
} catch (err: any) {
|
||||
// err is any because fs errors can have various shapes with code property
|
||||
if (err.code === 'ENOENT') {
|
||||
} catch (innerErr: any) {
|
||||
// innerErr is any because fs errors can have various shapes with code property
|
||||
if (innerErr.code === 'ENOENT') {
|
||||
throw new Error(`Neither ${xpath} nor ${xpath}.br is accessible`)
|
||||
}
|
||||
throw err
|
||||
throw innerErr
|
||||
}
|
||||
} else {
|
||||
throw err
|
||||
|
||||
@@ -68,10 +68,10 @@ export default function appRouterGateway(req: ExtendedRequest, res: Response, ne
|
||||
if (shouldUseAppRouter(path, pageFound)) {
|
||||
console.log(`[INFO] Using App Router for path: ${path} (pageFound: ${!!pageFound})`)
|
||||
|
||||
const strippedPath = stripLocalePrefix(path)
|
||||
const innerStrippedPath = stripLocalePrefix(path)
|
||||
|
||||
// For 404 routes, always route to our 404 page
|
||||
if (strippedPath === '/404' || strippedPath === '/_not-found' || !pageFound) {
|
||||
if (innerStrippedPath === '/404' || innerStrippedPath === '/_not-found' || !pageFound) {
|
||||
req.url = '/404'
|
||||
res.status(404)
|
||||
defaultCacheControl(res)
|
||||
|
||||
@@ -44,13 +44,13 @@ function getBreadcrumbs(req: ExtendedRequest, isEarlyAccess: boolean) {
|
||||
}
|
||||
}
|
||||
|
||||
const breadcrumbs = traverseTreeTitles(
|
||||
const breadcrumbsResult = traverseTreeTitles(
|
||||
req.context.currentPath,
|
||||
req.context.currentProductTreeTitles,
|
||||
)
|
||||
;[...Array(cutoff)].forEach(() => breadcrumbs.shift())
|
||||
;[...Array(cutoff)].forEach(() => breadcrumbsResult.shift())
|
||||
|
||||
return breadcrumbs
|
||||
return breadcrumbsResult
|
||||
}
|
||||
|
||||
// Return an array as if you'd traverse down a tree. Imagine a tree like
|
||||
|
||||
@@ -91,20 +91,20 @@ export default async function contextualize(
|
||||
// The reason this is a function is because most of the time, we don't
|
||||
// need to know the English equivalent. It only comes into play if a
|
||||
// translated
|
||||
req.context.getEnglishPage = (context) => {
|
||||
if (!context.enPage) {
|
||||
const { page } = context
|
||||
req.context.getEnglishPage = (ctx) => {
|
||||
if (!ctx.enPage) {
|
||||
const { page } = ctx
|
||||
if (!page) {
|
||||
throw new Error("The 'page' has not been put into the context yet.")
|
||||
}
|
||||
const enPath = context.currentPath!.replace(`/${page.languageCode}`, '/en')
|
||||
const enPage = context.pages![enPath]
|
||||
const enPath = ctx.currentPath!.replace(`/${page.languageCode}`, '/en')
|
||||
const enPage = ctx.pages![enPath]
|
||||
if (!enPage) {
|
||||
throw new Error(`Unable to find equivalent English page by the path '${enPath}'`)
|
||||
}
|
||||
context.enPage = enPage
|
||||
ctx.enPage = enPage
|
||||
}
|
||||
return context.enPage
|
||||
return ctx.enPage
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -41,7 +41,7 @@ export default async function glossaries(req: ExtendedRequest, res: Response, ne
|
||||
'glossaries.external',
|
||||
req.context.currentLanguage!,
|
||||
)
|
||||
const glossaries = (
|
||||
const glossariesList = (
|
||||
await Promise.all(
|
||||
glossariesRaw.map(async (glossary) => {
|
||||
let { description } = glossary
|
||||
@@ -80,7 +80,7 @@ export default async function glossaries(req: ExtendedRequest, res: Response, ne
|
||||
)
|
||||
).filter(Boolean)
|
||||
|
||||
req.context.glossaries = glossaries.sort((a, b) =>
|
||||
req.context.glossaries = glossariesList.sort((a, b) =>
|
||||
a.term.localeCompare(b.term, req.context!.currentLanguage),
|
||||
)
|
||||
|
||||
|
||||
@@ -82,11 +82,11 @@ const asyncMiddleware =
|
||||
<TReq extends Request = Request, T = void>(
|
||||
fn: (req: TReq, res: Response, next: NextFunction) => T | Promise<T>,
|
||||
) =>
|
||||
async (req: Request, res: Response, next: NextFunction) => {
|
||||
async (req: Request, res: Response, nextFn: NextFunction) => {
|
||||
try {
|
||||
await fn(req as TReq, res, next)
|
||||
await fn(req as TReq, res, nextFn)
|
||||
} catch (error) {
|
||||
next(error)
|
||||
nextFn(error)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ export const nextApp = next({ dev: isDevelopment })
|
||||
export const nextHandleRequest = nextApp.getRequestHandler()
|
||||
await nextApp.prepare()
|
||||
|
||||
function renderPageWithNext(req: ExtendedRequest, res: Response, next: NextFunction) {
|
||||
function renderPageWithNext(req: ExtendedRequest, res: Response, nextFn: NextFunction) {
|
||||
if (req.path.startsWith('/_next') && !req.path.startsWith('/_next/data')) {
|
||||
return nextHandleRequest(req, res)
|
||||
}
|
||||
@@ -20,7 +20,7 @@ function renderPageWithNext(req: ExtendedRequest, res: Response, next: NextFunct
|
||||
// '/_next/static/webpack/64e44ef62e261d3a.webpack.hot-update.json' has to
|
||||
// go through here.
|
||||
|
||||
return next()
|
||||
return nextFn()
|
||||
}
|
||||
|
||||
export default renderPageWithNext
|
||||
|
||||
@@ -42,11 +42,11 @@ describe('manifest', () => {
|
||||
expect(manifest.icons.length).toBeGreaterThan(0)
|
||||
await Promise.all(
|
||||
manifest.icons.map(async (icon) => {
|
||||
const res = await get(icon.src, { responseType: 'buffer' })
|
||||
expect(res.statusCode).toBe(200)
|
||||
expect(res.headers['content-type']).toBe(icon.type)
|
||||
const iconRes = await get(icon.src, { responseType: 'buffer' })
|
||||
expect(iconRes.statusCode).toBe(200)
|
||||
expect(iconRes.headers['content-type']).toBe(icon.type)
|
||||
// The `sizes` should match the payload
|
||||
const image = sharp(res.body)
|
||||
const image = sharp(iconRes.body)
|
||||
const [width, height] = icon.sizes.split('x').map((s) => parseInt(s))
|
||||
const dimensions = await image.metadata()
|
||||
expect(dimensions.width).toBe(width)
|
||||
|
||||
@@ -75,7 +75,7 @@ describe('pages module', () => {
|
||||
// Only consider as duplicate if more than one unique file defines the same redirect
|
||||
const duplicates = Array.from(redirectToFiles.entries())
|
||||
.filter(([, files]) => files.size > 1)
|
||||
.map(([path]) => path)
|
||||
.map(([redirectPath]) => redirectPath)
|
||||
|
||||
// Build a detailed message with sources for each duplicate
|
||||
const message = `Found ${duplicates.length} duplicate redirect_from path${duplicates.length === 1 ? '' : 's'}.
|
||||
|
||||
@@ -126,7 +126,7 @@ export async function syncGitHubAppsData(
|
||||
const { progAccessData, progActorResources } = await getProgAccessData(progAccessSource)
|
||||
|
||||
for (const schemaName of sourceSchemas) {
|
||||
const data = JSON.parse(
|
||||
const schemaData = JSON.parse(
|
||||
await readFile(path.join(openApiSource, schemaName), 'utf8'),
|
||||
) as OpenApiData
|
||||
const appsDataConfig = JSON.parse(await readFile(CONFIG_FILE, 'utf8')) as AppsDataConfig
|
||||
@@ -138,7 +138,7 @@ export async function syncGitHubAppsData(
|
||||
}
|
||||
// Because the information used on the apps page doesn't require any
|
||||
// rendered content we can parse the dereferenced files directly
|
||||
for (const [requestPath, operationsAtPath] of Object.entries(data.paths)) {
|
||||
for (const [requestPath, operationsAtPath] of Object.entries(schemaData.paths)) {
|
||||
for (const [verb, operation] of Object.entries(operationsAtPath)) {
|
||||
// We only want to process operations that have programmatic access data
|
||||
if (!progAccessData[operation.operationId]) continue
|
||||
@@ -491,17 +491,17 @@ export function shouldFilterMetadataPermission(
|
||||
export function isActorExcluded(
|
||||
excludedActors: string[] | undefined | null | unknown,
|
||||
actorType: string,
|
||||
actorTypeMap: Record<string, string> = {},
|
||||
actorMapping: Record<string, string> = {},
|
||||
): boolean {
|
||||
if (!excludedActors || !Array.isArray(excludedActors)) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Map generic actor type to actual YAML value if mapping exists
|
||||
const actualActorType = actorTypeMap[actorType] || actorType
|
||||
const mappedActorType = actorMapping[actorType] || actorType
|
||||
|
||||
// Check if the mapped actor type is excluded
|
||||
if (excludedActors.includes(actualActorType)) {
|
||||
if (excludedActors.includes(mappedActorType)) {
|
||||
return true
|
||||
}
|
||||
|
||||
@@ -571,7 +571,7 @@ async function getProgActorResourceContent({
|
||||
owner,
|
||||
repo,
|
||||
branch,
|
||||
path,
|
||||
path: resourcePath,
|
||||
gitHubSourceDirectory = null,
|
||||
}: ProgActorResourceContentOptions): Promise<ProgActorResources> {
|
||||
// Get files either locally from disk or from the GitHub remote repo
|
||||
@@ -579,7 +579,7 @@ async function getProgActorResourceContent({
|
||||
if (gitHubSourceDirectory) {
|
||||
files = await getProgActorContentFromDisk(gitHubSourceDirectory)
|
||||
} else {
|
||||
files = await getDirectoryContents(owner!, repo!, branch!, path!)
|
||||
files = await getDirectoryContents(owner!, repo!, branch!, resourcePath!)
|
||||
}
|
||||
|
||||
// We need to format the file content into a single object. Each file
|
||||
|
||||
@@ -20,8 +20,8 @@ export function Changelog({ changelogItems }: Props) {
|
||||
<React.Fragment key={index}>
|
||||
<p>{change.title}</p>
|
||||
<ul>
|
||||
{change.changes.map((change) => (
|
||||
<li key={change} dangerouslySetInnerHTML={{ __html: change }} />
|
||||
{change.changes.map((changeItem) => (
|
||||
<li key={changeItem} dangerouslySetInnerHTML={{ __html: changeItem }} />
|
||||
))}
|
||||
</ul>
|
||||
</React.Fragment>
|
||||
@@ -30,8 +30,8 @@ export function Changelog({ changelogItems }: Props) {
|
||||
<React.Fragment key={index}>
|
||||
<p>{change.title}</p>
|
||||
<ul>
|
||||
{change.changes.map((change) => (
|
||||
<li key={change} dangerouslySetInnerHTML={{ __html: change }} />
|
||||
{change.changes.map((changeItem) => (
|
||||
<li key={changeItem} dangerouslySetInnerHTML={{ __html: changeItem }} />
|
||||
))}
|
||||
</ul>
|
||||
</React.Fragment>
|
||||
@@ -39,8 +39,8 @@ export function Changelog({ changelogItems }: Props) {
|
||||
{(item.upcomingChanges || []).map((change, index) => (
|
||||
<React.Fragment key={index}>
|
||||
<p>{change.title}</p>
|
||||
{change.changes.map((change) => (
|
||||
<li key={change} dangerouslySetInnerHTML={{ __html: change }} />
|
||||
{change.changes.map((changeItem) => (
|
||||
<li key={changeItem} dangerouslySetInnerHTML={{ __html: changeItem }} />
|
||||
))}
|
||||
</React.Fragment>
|
||||
))}
|
||||
|
||||
@@ -389,10 +389,10 @@ export default async function processSchemas(
|
||||
}
|
||||
|
||||
await Promise.all(
|
||||
mutationReturnFields.fields!.map(async (field: FieldDefinitionNode) => {
|
||||
mutationReturnFields.fields!.map(async (returnFieldDef: FieldDefinitionNode) => {
|
||||
const returnField: Partial<ReturnFieldInfo> = {}
|
||||
returnField.name = field.name.value
|
||||
const fieldType = helpers.getType(field)
|
||||
returnField.name = returnFieldDef.name.value
|
||||
const fieldType = helpers.getType(returnFieldDef)
|
||||
if (!fieldType) return
|
||||
returnField.type = fieldType
|
||||
returnField.id = helpers.getId(returnField.type)
|
||||
|
||||
@@ -128,8 +128,8 @@ function getFullLink(baseType: string, id: string): string {
|
||||
return `/graphql/reference/${baseType}#${id}`
|
||||
}
|
||||
|
||||
function getId(path: string): string {
|
||||
return removeMarkers(path).toLowerCase()
|
||||
function getId(typeName: string): string {
|
||||
return removeMarkers(typeName).toLowerCase()
|
||||
}
|
||||
|
||||
// e.g., given `ObjectTypeDefinition`, get `objects`
|
||||
|
||||
@@ -45,18 +45,18 @@ export const ProductSelectionCard = ({ group }: ProductSelectionCardProps) => {
height: '22px',
}

function icon(group: ProductGroupT) {
if (group.icon) {
function icon(productGroup: ProductGroupT) {
if (productGroup.icon) {
return (
<div className="pr-3">
<img src={group.icon} alt={group.name} style={groupIcon}></img>
<img src={productGroup.icon} alt={productGroup.name} style={groupIcon}></img>
</div>
)
} else if (group.octicon) {
const octicon: React.FunctionComponent = octiconMap[group.octicon]
} else if (productGroup.octicon) {
const octicon: React.FunctionComponent = octiconMap[productGroup.octicon]

if (!octicon) {
throw new Error(`Octicon ${group.octicon} not found`)
throw new Error(`Octicon ${productGroup.octicon} not found`)
}

return (

@@ -90,8 +90,8 @@ export const languagePrefixPathRegex: RegExp = new RegExp(`^/(${languageKeys.joi
* if it's something like /foo or /foo/bar or /fr (because French (fr)
* is currently not an active language)
*/
export function pathLanguagePrefixed(path: string): boolean {
return languagePrefixPathRegex.test(path)
export function pathLanguagePrefixed(urlPath: string): boolean {
return languagePrefixPathRegex.test(urlPath)
}

export default languages

@@ -41,14 +41,14 @@ for (const language of languages) {
}

function languagesFromString(str: string): string[] {
const languages = str
const parsedLanguages = str
.split(/,/)
.map((x) => x.trim())
.filter(Boolean)
if (!languages.every((lang) => languageKeys.includes(lang))) {
if (!parsedLanguages.every((lang) => languageKeys.includes(lang))) {
throw new Error(
`Unrecognized language code (${languages.find((lang) => !languageKeys.includes(lang))})`,
`Unrecognized language code (${parsedLanguages.find((lang) => !languageKeys.includes(lang))})`,
)
}
return languages
return parsedLanguages
}

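In `languagesFromString` the local result shadowed the module-scope `languages` collection that the surrounding loop iterates over. A sketch of why that shadow matters, with hypothetical bindings:

const languages = ['en', 'ja', 'es'] // module scope

function parse(str: string): string[] {
  // With the old `const languages = ...` here, the module-level list became
  // unreachable for the rest of the function, so any later check against it
  // would silently compare the input with itself.
  const parsedLanguages = str.split(',').map((x) => x.trim()).filter(Boolean)
  return parsedLanguages.filter((lang) => languages.includes(lang))
}
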
@@ -96,7 +96,7 @@ describe('release notes', () => {
//
// This is useful because if we test every single individual version of
// every plan the test just takes way too long.
const getReleaseNotesVersionCombinations = (langs: string[]) => {
const getReleaseNotesVersionCombinations = (languages: string[]) => {
const combinations = []
const prefixes: string[] = []
for (const version of page!.applicableVersions) {
@@ -105,7 +105,7 @@ describe('release notes', () => {
continue
}
prefixes.push(prefix)
combinations.push(...langs.map((lang) => [lang, version]))
combinations.push(...languages.map((lang) => [lang, version]))
}
return combinations
}

@@ -320,8 +320,8 @@ describe('Translation Error Comments', () => {
}

// Mock renderContent to simulate error for Japanese, success for English
mockRenderContent.mockImplementation((template: string, context: any) => {
if (context.currentLanguage !== 'en' && template.includes('badtag')) {
mockRenderContent.mockImplementation((template: string, innerContext: any) => {
if (innerContext.currentLanguage !== 'en' && template.includes('badtag')) {
const error = new Error("Unknown tag 'badtag'")
error.name = 'ParseError'
;(error as any).token = {
@@ -330,7 +330,7 @@ describe('Translation Error Comments', () => {
}
throw error
}
return context.currentLanguage === 'en' ? 'English Title' : template
return innerContext.currentLanguage === 'en' ? 'English Title' : template
})

const result = await renderContentWithFallback(mockPage, 'rawTitle', context)
@@ -357,8 +357,8 @@ describe('Translation Error Comments', () => {
},
}

mockRenderContent.mockImplementation((template: string, context: any) => {
if (context.currentLanguage !== 'en' && template.includes('badtag')) {
mockRenderContent.mockImplementation((template: string, innerContext: any) => {
if (innerContext.currentLanguage !== 'en' && template.includes('badtag')) {
const error = new Error("Unknown tag 'badtag'")
error.name = 'ParseError'
throw error

@@ -57,10 +57,10 @@ describe('learning tracks', () => {
let fixables = 0
for (const [key, guides] of troubles) {
errorMessage += `Under "${key}"...\n`
for (const { uri, index, redirects } of guides) {
if (redirects) {
for (const { uri, index, redirects: redirectTo } of guides) {
if (redirectTo) {
fixables += 1
errorMessage += ` guide: #${index + 1} ${uri} redirects to ${redirects}\n`
errorMessage += ` guide: #${index + 1} ${uri} redirects to ${redirectTo}\n`
} else {
errorMessage += ` guide: #${index + 1} ${uri} is broken.\n`
}

@@ -284,8 +284,8 @@ function fillPopover(
const regex = /^\/(?<lang>\w{2}\/)?(?<version>[\w-]+@[\w-.]+\/)?(?<product>[\w-]+\/)?/
const match = regex.exec(linkURL.pathname)
if (match?.groups) {
const { lang, version, product } = match.groups
const productURL = [lang, version, product].map((n) => n || '').join('')
const { lang, version, product: productPath } = match.groups
const productURL = [lang, version, productPath].map((n) => n || '').join('')
productHeadLink.href = `${linkURL.origin}/${productURL}`
}
productHead.style.display = 'block'

@@ -483,8 +483,8 @@ async function commentOnPR(core: CoreInject, octokit: Octokit, flaws: LinkFlaw[]
issue_number: pullNumber,
})
let previousCommentId
for (const { body, id } of data) {
if (body && body.includes(findAgainSymbol)) {
for (const { body: commentBody, id } of data) {
if (commentBody && commentBody.includes(findAgainSymbol)) {
previousCommentId = id
}
}

@@ -20,7 +20,7 @@ export function generateNewJSON(

let countChanges = 0
for (const [identifier, url] of Object.entries(destination)) {
const check = checks.find((check) => check.identifier === identifier)
const check = checks.find((foundCheck) => foundCheck.identifier === identifier)
if (check) {
// At the moment, the only possible correction is if the URL is
// found but required a redirect.

@@ -412,8 +412,8 @@ try {

// Given input: https://docs.github.com/en/copilot/managing-copilot/
// Use: copilot/managing-copilot
function getCleanPath(providedPath: string): string {
let clean = providedPath
function getCleanPath(inputPath: string): string {
let clean = inputPath
const cleanArr = clean.split('?') // remove query params
if (cleanArr.length > 1) cleanArr.pop()
clean = cleanArr.join('/')
@@ -431,29 +431,29 @@ function getCleanPath(providedPath: string): string {
return clean
}

function getVersion(cleanPath: string): string {
const pathParts = cleanPath.split('/')
const version = ENTERPRISE_REGEX.test(pathParts[0]) ? pathParts[0] : FREE_PRO_TEAM
return version
function getVersion(pathToCheck: string): string {
const pathParts = pathToCheck.split('/')
const versionString = ENTERPRISE_REGEX.test(pathParts[0]) ? pathParts[0] : FREE_PRO_TEAM
return versionString
}

function removeVersionSegment(cleanPath: string, version: string): string {
if (version === FREE_PRO_TEAM) return cleanPath
const pathParts = cleanPath.split('/')
function removeVersionSegment(pathToProcess: string, versionString: string): string {
if (versionString === FREE_PRO_TEAM) return pathToProcess
const pathParts = pathToProcess.split('/')
pathParts.shift()
if (!pathParts.length) return 'index'
return pathParts.join('/')
}

// Try to find the path in the list of valid pages at https://docs.github.com/api/pagelist/en
async function validatePath(cleanPath: string, version: string): Promise<void> {
async function validatePath(pathToValidate: string, versionToValidate: string): Promise<void> {
// Only Kusto uses 'index' for the homepage; the Docs API uses '/en'
const basePath = cleanPath === 'index' ? '' : cleanPath
const basePath = pathToValidate === 'index' ? '' : pathToValidate

const pathToCheck =
version === FREE_PRO_TEAM
versionToValidate === FREE_PRO_TEAM
? path.join('/', 'en', basePath)
: path.join('/', 'en', version, basePath)
: path.join('/', 'en', versionToValidate, basePath)

let data: string
try {

@@ -141,9 +141,9 @@ async function handleError(
// Report to Failbot AFTER responding to the user
await logException(error, req)
}
} catch (error) {
console.error('An error occurred in the error handling middleware!', error)
next(error)
} catch (handlingError) {
console.error('An error occurred in the error handling middleware!', handlingError)
next(handlingError)
return
}
}

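The catch-clause rename above keeps the `error` being handled distinct from a second failure raised while handling it. A minimal sketch of the hazard, with a hypothetical reporter:

async function handle(error: Error, report: (e: Error) => Promise<void>): Promise<void> {
  try {
    await report(error)
  } catch (handlingError) {
    // With the old `catch (error)`, this line could only ever see the
    // reporting failure, never the original error it was trying to report.
    console.error('Failed to report', error.message, 'because of', handlingError)
  }
}
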
@@ -116,7 +116,7 @@ describe('getAutomaticRequestLogger', () => {

// Create a completely isolated test environment for each iteration
const isolatedLogs: string[] = []
const originalConsoleLog = console.log
const savedConsoleLog = console.log

// Replace console.log with isolated capture
console.log = vi.fn((message: string) => {
@@ -174,7 +174,7 @@ describe('getAutomaticRequestLogger', () => {
expect(isolatedLogs[0]).toContain(testCase.expectedInLog)
} finally {
// Always restore console.log
console.log = originalConsoleLog
console.log = savedConsoleLog
}
}
})
@@ -281,7 +281,7 @@ describe('getAutomaticRequestLogger', () => {

// Create isolated log capture for this specific test
const isolatedLogs: string[] = []
const originalConsoleLog = console.log
const savedConsoleLog = console.log

console.log = vi.fn((message: string) => {
isolatedLogs.push(message)
@@ -299,7 +299,7 @@ describe('getAutomaticRequestLogger', () => {
expect(isolatedLogs).toHaveLength(0)
} finally {
// Always restore console.log
console.log = originalConsoleLog
console.log = savedConsoleLog
}
})

@@ -312,14 +312,14 @@ function tryReplacements(prefix: string, suffix: string, context: Context): stri
return undefined
}

const test = (suffix: string): boolean => {
const test = (testSuffix: string): boolean => {
// This is a generally broad search and replace and this particular
// replacement has never been present in api documentation only enterprise
// admin documentation, so we're excluding the REST api pages
if (suffix.includes('/rest')) {
if (testSuffix.includes('/rest')) {
return false
}
const candidateAsRedirect = prefix + suffix
const candidateAsRedirect = prefix + testSuffix
const candidateAsURL = `/en${candidateAsRedirect}`
return candidateAsRedirect in redirects || candidateAsURL in pages
}

@@ -71,12 +71,10 @@ export default async function ghesReleaseNotesContext(
// notes instead.
enContext.ghesReleases = formatReleases(ghesReleaseNotes)

const matchedReleaseNotes = enContext.ghesReleases!.find(
(r) => r.version === requestedRelease,
)
if (!matchedReleaseNotes) throw new Error('Release notes not found')
const currentReleaseNotes = matchedReleaseNotes.patches
return renderPatchNotes(currentReleaseNotes, enContext)
const enMatchedNotes = enContext.ghesReleases!.find((r) => r.version === requestedRelease)
if (!enMatchedNotes) throw new Error('Release notes not found')
const enCurrentNotes = enMatchedNotes.patches
return renderPatchNotes(enCurrentNotes, enContext)
},
)
} finally {

@@ -120,7 +120,7 @@ export async function getRestMiniTocItems(
category: string,
subCategory: string,
apiVersion: string | undefined,
restOperations: Operation[],
operations: Operation[],
language: string,
version: string,
context: Context,
@@ -148,7 +148,7 @@ export async function getRestMiniTocItems(

const categoryData = apiData.get(category)!
if (!categoryData.get(subCategory)) {
const titles = restOperations.map((operation: Operation) => operation.title)
const titles = operations.map((operation: Operation) => operation.title)
const restOperationsMiniTocItems = await getAutomatedPageMiniTocItems(titles, context, 3)
categoryData.set(subCategory, {
restOperationsMiniTocItems,

@@ -163,5 +163,5 @@ function difference(obj1: Record<string, string[]>, obj2: Record<string, string[
export function getAutomatedMarkdownFiles(rootDir: string): string[] {
return walkFiles(rootDir, '.md')
.filter((file) => !file.includes('index.md'))
.filter((file) => !nonAutomatedRestPaths.some((path) => file.includes(path)))
.filter((file) => !nonAutomatedRestPaths.some((excludePath) => file.includes(excludePath)))
}

@@ -151,23 +151,21 @@ export async function getOpenApiSchemaFiles(
// bundling the OpenAPI in github/github
const schemaNames = schemas.map((schema) => path.basename(schema, '.json'))

const OPENAPI_VERSION_NAMES = Object.keys(allVersions).map(
(elem) => allVersions[elem].openApiVersionName,
)
const versionNames = Object.keys(allVersions).map((elem) => allVersions[elem].openApiVersionName)

for (const schema of schemaNames) {
const schemaBasename = `${schema}.json`
// If the version doesn't have calendar date versioning
// it should have an exact match with one of the versions defined
// in the allVersions object.
if (OPENAPI_VERSION_NAMES.includes(schema)) {
if (versionNames.includes(schema)) {
webhookSchemas.push(schemaBasename)
}

// If the schema version has calendar date versioning, then one of
// the versions defined in allVersions should be a substring of the
// schema version. This means the schema version is a supported version
if (OPENAPI_VERSION_NAMES.some((elem) => schema.startsWith(elem))) {
if (versionNames.some((elem) => schema.startsWith(elem))) {
// If the schema being evaluated is a calendar-date version, then
// there would only be one exact match in the list of schema names.
// If the schema being evaluated is a non-calendar-date version, then

@@ -190,10 +190,12 @@ describe('OpenAPI schema validation', () => {
})
})

async function findOperation(version: string, method: string, path: string) {
async function findOperation(version: string, method: string, requestPath: string) {
const allOperations = await getFlatListOfOperations(version)
return allOperations.find((operation) => {
return operation.requestPath === path && operation.verb.toLowerCase() === method.toLowerCase()
return (
operation.requestPath === requestPath && operation.verb.toLowerCase() === method.toLowerCase()
)
})
}

@@ -23,7 +23,7 @@ describe('REST references docs', () => {
.map((i, h2) => $(h2).attr('id'))
.get()
const schemaSlugs = checksRestOperations.map((operation) => slug(operation.title))
expect(schemaSlugs.every((slug) => domH2Ids.includes(slug))).toBe(true)
expect(schemaSlugs.every((operationSlug) => domH2Ids.includes(operationSlug))).toBe(true)
}
})

@@ -187,47 +187,45 @@ export function SearchOverlay({
// Combine options for key navigation
const [combinedOptions, generalOptionsWithViewStatus, aiOptionsWithUserInput] = useMemo(() => {
setAnnouncement('')
let generalOptionsWithViewStatus = [...generalSearchResults]
const aiOptionsWithUserInput = [...userInputOptions, ...filteredAIOptions]
const combinedOptions = [] as Array<{
let generalWithView = [...generalSearchResults]
const aiWithUser = [...userInputOptions, ...filteredAIOptions]
const combined = [] as Array<{
group: 'general' | 'ai' | string
url?: string
option: AutocompleteSearchHitWithUserQuery | GeneralSearchHitWithOptions
}>

if (generalSearchResults.length > 0) {
generalOptionsWithViewStatus.push({
generalWithView.push({
title: t('search.overlay.view_all_search_results'),
isViewAllResults: true,
} as any)
} else if (autoCompleteSearchError) {
if (urlSearchInputQuery.trim() !== '') {
generalOptionsWithViewStatus.push({
generalWithView.push({
...(userInputOptions[0] || {}),
isSearchDocsOption: true,
} as unknown as GeneralSearchHit)
}
} else if (urlSearchInputQuery.trim() !== '' && !searchLoading) {
setAnnouncement(t('search.overlay.no_results_found_announcement'))
generalOptionsWithViewStatus.push({
generalWithView.push({
title: t('search.overlay.no_results_found'),
isNoResultsFound: true,
} as any)
} else {
generalOptionsWithViewStatus = []
generalWithView = []
}
// NOTE: Order of combinedOptions is important, since 'selectedIndex' is used to navigate the combinedOptions array
// Add general options _before_ AI options
combinedOptions.push(
...generalOptionsWithViewStatus.map((option) => ({ group: 'general', option })),
)
combined.push(...generalWithView.map((option) => ({ group: 'general', option })))
// On AI Error, don't include AI suggestions, only user input
if (!aiSearchError && !isAskAIState) {
combinedOptions.push(...aiOptionsWithUserInput.map((option) => ({ group: 'ai', option })))
combined.push(...aiWithUser.map((option) => ({ group: 'ai', option })))
} else if (isAskAIState && !aiCouldNotAnswer) {
// When "ask ai" state is reached, we have references that are ActionList items.
// We want to navigate these items via the keyboard, so include them in the combinedOptions array
combinedOptions.push(
combined.push(
...aiReferences.map((option) => ({
group: 'reference', // The references are actually article URLs that we want to navigate to
url: option.url,
@@ -240,7 +238,7 @@ export function SearchOverlay({
)
}

return [combinedOptions, generalOptionsWithViewStatus, aiOptionsWithUserInput]
return [combined, generalWithView, aiWithUser]
}, [
generalSearchResults,
totalGeneralSearchResults,

@@ -159,12 +159,12 @@ function ResultsPagination({ page, totalPages }: { page: number; totalPages: num
}
}, [asPath])

function hrefBuilder(page: number) {
function hrefBuilder(pageNumber: number) {
const params = new URLSearchParams(asPathQuery)
if (page === 1) {
if (pageNumber === 1) {
params.delete('page')
} else {
params.set('page', `${page}`)
params.set('page', `${pageNumber}`)
}
return `/${router.locale}${asPathRoot}?${params}`
}
@@ -176,22 +176,22 @@ function ResultsPagination({ page, totalPages }: { page: number; totalPages: num
pageCount={Math.min(totalPages, 10)}
currentPage={page}
hrefBuilder={hrefBuilder}
onPageChange={(event, page) => {
onPageChange={(event, pageNum) => {
event.preventDefault()

const [asPathRoot, asPathQuery = ''] = router.asPath.split('#')[0].split('?')
const params = new URLSearchParams(asPathQuery)
if (page !== 1) {
params.set('page', `${page}`)
const [pathRoot, pathQuery = ''] = router.asPath.split('#')[0].split('?')
const params = new URLSearchParams(pathQuery)
if (pageNum !== 1) {
params.set('page', `${pageNum}`)
} else {
params.delete('page')
}
let asPath = `/${router.locale}${asPathRoot}`
let newPath = `/${router.locale}${pathRoot}`
if (params.toString()) {
asPath += `?${params}`
newPath += `?${params}`
}
setAsPath(asPath)
router.push(asPath)
setAsPath(newPath)
router.push(newPath)
}}
/>
</Box>

@@ -100,11 +100,11 @@ export function getPlanVersionFromIndexVersion(indexVersion: string): string {
// This is needed for scraping since the pages use the 'allVersions' key as their version
export function getAllVersionsKeyFromIndexVersion(indexVersion: string): string {
const key = Object.keys(allVersions).find(
(key) =>
key === indexVersion ||
allVersions[key].shortName === indexVersion ||
allVersions[key].plan === indexVersion ||
allVersions[key].miscVersionName === indexVersion,
(versionKey) =>
versionKey === indexVersion ||
allVersions[versionKey].shortName === indexVersion ||
allVersions[versionKey].plan === indexVersion ||
allVersions[versionKey].miscVersionName === indexVersion,
)

if (!key) {

@@ -79,8 +79,8 @@ try {
process.exit(1)
}

async function main(opts: Options, args: string[]): Promise<void> {
const texts = [args.join(' ')]
async function main(opts: Options, textArgs: string[]): Promise<void> {
const texts = [textArgs.join(' ')]
if (!opts.elasticsearchUrl && !process.env.ELASTICSEARCH_URL) {
throw new Error(
'Must pass the elasticsearch URL option or ' +

@@ -98,7 +98,7 @@ export async function indexGeneralSearch(sourceDirectory: string, opts: Options)
versionsToIndex,
)

for (const language of languages) {
for (const lang of languages) {
let count = 0
for (const versionKey of versionsToIndex) {
const startTime = new Date()
@@ -106,11 +106,11 @@ export async function indexGeneralSearch(sourceDirectory: string, opts: Options)
const { indexName, indexAlias } = getElasticSearchIndex(
'generalSearch',
versionKey,
language,
lang,
opts.indexPrefix || '',
)

await indexVersion(client, indexName, indexAlias, language, sourceDirectory, opts)
await indexVersion(client, indexName, indexAlias, lang, sourceDirectory, opts)

count++
if (opts.staggerSeconds && count < versionsToIndex.length - 1) {

@@ -64,11 +64,11 @@ export async function populateIndex(
{
attempts,
sleepTime,
onError: (_, attempts, sleepTime) => {
onError: (_, remainingAttempts, sleepMs) => {
console.warn(
chalk.yellow(
`Failed to bulk index ${indexName}. Will attempt ${attempts} more times (after ${
sleepTime / 1000
`Failed to bulk index ${indexName}. Will attempt ${remainingAttempts} more times (after ${
sleepMs / 1000
}s sleep).`,
),
)

@@ -42,9 +42,9 @@ export default function handleInvalidQuerystringValues(
for (const [key, value] of Object.entries(query)) {
if (RECOGNIZED_VALUES_KEYS.has(key)) {
const validValues = RECOGNIZED_VALUES[key as keyof typeof RECOGNIZED_VALUES]
const value = query[key]
const values = Array.isArray(value) ? value : [value]
if (values.some((value) => typeof value === 'string' && !validValues.includes(value))) {
const queryValue = query[key]
const values = Array.isArray(queryValue) ? queryValue : [queryValue]
if (values.some((val) => typeof val === 'string' && !validValues.includes(val))) {
if (process.env.NODE_ENV === 'development') {
console.warn(
'Warning! Invalid query string *value* detected. %O is not one of %O',

@@ -53,9 +53,9 @@ describe('invalid query strings', () => {
expect(res.headers.location).toBe('/en')
// But note that it only applies to the home page!
{
const url = `/en/get-started?${randomCharacters(8)}`
const res = await get(url)
expect(res.statusCode).toBe(200)
const nestedUrl = `/en/get-started?${randomCharacters(8)}`
const nestedRes = await get(nestedUrl)
expect(nestedRes.statusCode).toBe(200)
}
})

@@ -19,7 +19,7 @@ const platforms = [
function showPlatformSpecificContent(platform: string) {
const markdowns = Array.from(document.querySelectorAll<HTMLElement>('.ghd-tool'))
markdowns
.filter((el) => platforms.some((platform) => el.classList.contains(platform.value)))
.filter((el) => platforms.some((platformValue) => el.classList.contains(platformValue.value)))
.forEach((el) => {
el.style.display = el.classList.contains(platform) ? '' : 'none'

@@ -36,7 +36,7 @@ function showPlatformSpecificContent(platform: string) {
// example: <span class="platform-mac">inline content</span>
const platformEls = Array.from(
document.querySelectorAll<HTMLElement>(
platforms.map((platform) => `.platform-${platform.value}`).join(', '),
platforms.map((platformOption) => `.platform-${platformOption.value}`).join(', '),
),
)
platformEls.forEach((el) => {

@@ -14,7 +14,7 @@ import { InArticlePicker } from './InArticlePicker'
function showToolSpecificContent(tool: string, supportedTools: Array<string>) {
const markdowns = Array.from(document.querySelectorAll<HTMLElement>('.ghd-tool'))
markdowns
.filter((el) => supportedTools.some((tool) => el.classList.contains(tool)))
.filter((el) => supportedTools.some((toolName) => el.classList.contains(toolName)))
.forEach((el) => {
el.style.display = el.classList.contains(tool) ? '' : 'none'

@@ -31,7 +31,7 @@ function showToolSpecificContent(tool: string, supportedTools: Array<string>) {
// example: <span class="tool-webui">inline content</span>
const toolEls = Array.from(
document.querySelectorAll<HTMLElement>(
supportedTools.map((tool) => `.tool-${tool}`).join(', '),
supportedTools.map((toolOption) => `.tool-${toolOption}`).join(', '),
),
)
toolEls.forEach((el) => {

@@ -37,7 +37,7 @@ function getFeaturesByVersion(currentVersion: string): Record<string, boolean> {
allFeatures = getDeepDataByLanguage('features', 'en') as Record<string, FeatureVersions>
}

const features: {
const featureFlags: {
[feature: string]: boolean
} = {}
// Determine whether the currentVersion belongs to the list of versions the feature is available in.
@@ -51,9 +51,9 @@ function getFeaturesByVersion(currentVersion: string): Record<string, boolean> {
// Adding the resulting boolean to the context object gives us the ability to use
// `{% if featureName ... %}` conditionals in content files.
const isFeatureAvailableInCurrentVersion = applicableVersions.includes(currentVersion)
features[featureName] = isFeatureAvailableInCurrentVersion
featureFlags[featureName] = isFeatureAvailableInCurrentVersion
}
cache.set(currentVersion, features)
cache.set(currentVersion, featureFlags)
}

return cache.get(currentVersion)

@@ -78,7 +78,7 @@ describe('webhooks events and payloads', () => {
payloadExampleElem.each((i, elem) => {
const siblings = $(elem)
.nextUntil('[id^=webhook-payload-example]')
.filter((i, elem) => $(elem).hasClass('height-constrained-code-block'))
.filter((idx, sibling) => $(sibling).hasClass('height-constrained-code-block'))
expect(siblings.length).toBeGreaterThan(0)
})
}

@@ -162,8 +162,8 @@ function getChangedContentFiles(): string[] {
})
}

function makeURL(path: string): string {
return `http://localhost:4000${path}`
function makeURL(urlPath: string): string {
return `http://localhost:4000${urlPath}`
}

async function waitForServer(): Promise<void> {

@@ -101,13 +101,13 @@ export async function linkReports({
}

// Comment on all previous reports that are still open
for (const previousReport of previousReports) {
if (previousReport.state === 'closed' || previousReport.html_url === newReport.html_url) {
for (const oldReport of previousReports) {
if (oldReport.state === 'closed' || oldReport.html_url === newReport.html_url) {
continue
}

// If an old report is not assigned to someone we close it
const shouldClose = !previousReport.assignees?.length
const shouldClose = !oldReport.assignees?.length
let body = `➡️ [Newer report](${newReport.html_url})`
if (shouldClose) {
body += '\n\nClosing in favor of newer report since there are no assignees on this issue'
@@ -116,14 +116,12 @@ export async function linkReports({
await octokit.rest.issues.createComment({
owner,
repo,
issue_number: previousReport.number,
issue_number: oldReport.number,
body,
})
core.info(
`Linked old report to new report via comment on old report: #${previousReport.number}.`,
)
core.info(`Linked old report to new report via comment on old report: #${oldReport.number}.`)
} catch (error) {
core.setFailed(`Error commenting on previousReport, #${previousReport.number}`)
core.setFailed(`Error commenting on previousReport, #${oldReport.number}`)
throw error
}
if (shouldClose) {
@@ -131,12 +129,12 @@ export async function linkReports({
await octokit.rest.issues.update({
owner,
repo,
issue_number: previousReport.number,
issue_number: oldReport.number,
state: 'closed',
})
core.info(`Closing old report: #${previousReport.number} because it doesn't have assignees`)
core.info(`Closing old report: #${oldReport.number} because it doesn't have assignees`)
} catch (error) {
core.setFailed(`Error closing previousReport, #${previousReport.number}`)
core.setFailed(`Error closing previousReport, #${oldReport.number}`)
throw error
}
}

@@ -6,7 +6,7 @@ import { graphql } from '@octokit/graphql'
// Pull out the node ID of a project field
export function findFieldID(fieldName: string, data: Record<string, any>) {
const field = data.organization.projectV2.fields.nodes.find(
(field: Record<string, any>) => field.name === fieldName,
(fieldNode: Record<string, any>) => fieldNode.name === fieldName,
)

if (field && field.id) {
@@ -23,14 +23,14 @@ export function findSingleSelectID(
data: Record<string, any>,
) {
const field = data.organization.projectV2.fields.nodes.find(
(field: Record<string, any>) => field.name === fieldName,
(fieldData: Record<string, any>) => fieldData.name === fieldName,
)
if (!field) {
throw new Error(`A field called "${fieldName}" was not found. Check if the field was renamed.`)
}

const singleSelect = field.options.find(
(field: Record<string, any>) => field.name === singleSelectName,
(option: Record<string, any>) => option.name === singleSelectName,
)

if (singleSelect && singleSelect.id) {
@@ -203,7 +203,7 @@ export function generateUpdateProjectV2ItemFieldMutation({
// Build the mutation to update a single project field
// Specify literal=true to indicate that the value should be used as a string, not a variable
function generateMutationToUpdateField({
item,
item: itemId,
fieldID,
value,
fieldType,
@@ -220,12 +220,12 @@ export function generateUpdateProjectV2ItemFieldMutation({
// Strip all non-alphanumeric out of the item ID when creating the mutation ID to avoid a GraphQL parsing error
// (statistically, this should still give us a unique mutation ID)
return `
set_${fieldID.slice(1)}_item_${item.replaceAll(
set_${fieldID.slice(1)}_item_${itemId.replaceAll(
/[^a-z0-9]/g,
'',
)}: updateProjectV2ItemFieldValue(input: {
projectId: $project
itemId: "${item}"
itemId: "${itemId}"
fieldId: ${fieldID}
value: { ${parsedValue} }
}) {

@@ -16,7 +16,10 @@ export default function walkFiles(
const walkSyncOpts = { includeBasePath: true, directories: false }

return walk(dir, walkSyncOpts)
.filter((file) => extensions.some((ext) => file.endsWith(ext)) && !file.endsWith('README.md'))
.filter(
(file) =>
extensions.some((extension) => file.endsWith(extension)) && !file.endsWith('README.md'),
)
.filter((file) => (opts.includeEarlyAccess ? file : !file.includes('/early-access/')))
}

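With the violations fixed across these files, the rule can be enforced going forward. A hedged sketch of what that might look like in an ESLint flat config — assuming the repo lints TypeScript, where the typescript-eslint variant is generally preferred because the base rule false-positives on enums and type declarations:

export default [
  {
    rules: {
      // Disable the base rule in favor of the TS-aware one.
      'no-shadow': 'off',
      '@typescript-eslint/no-shadow': 'error',
    },
  },
]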