1
0
mirror of synced 2025-12-19 09:57:42 -05:00

Remove 'github/no-then' eslint rule (#58220)

This commit is contained in:
Kevin Heis
2025-10-30 12:53:05 -07:00
committed by GitHub
parent db3f4bfa9f
commit 23c9d95cb9
17 changed files with 186 additions and 246 deletions

View File

@@ -96,7 +96,6 @@ export default [
camelcase: 'off', // Many gh apis use underscores, 600+ uses
// Disabled rules to review
'github/no-then': 'off', // 30+
'@typescript-eslint/ban-ts-comment': 'off', // 50+
'no-shadow': 'off', // 150+
'github/array-foreach': 'off', // 250+

View File

@@ -192,4 +192,8 @@ function determineContentType(relativePath: string, legacyType: string): string
return OTHER_TYPE
}
main().catch(console.error)
try {
await main()
} catch (error) {
console.error(error)
}

View File

@@ -45,14 +45,13 @@ async function fetchCookies(): Promise<DotcomCookies> {
}
// Make a single fetch request to the backend.
inFlightPromise = fetch(GET_COOKIES_ENDPOINT)
.then((response) => {
inFlightPromise = (async () => {
try {
const response = await fetch(GET_COOKIES_ENDPOINT)
if (!response.ok) {
throw new Error(`Failed to fetch cookies: ${response.statusText}`)
}
return response.json() as Promise<DotcomCookies>
})
.then((data) => {
const data = (await response.json()) as DotcomCookies
cachedCookies = data
// Store the fetched cookies in local storage for future use.
try {
@@ -61,8 +60,7 @@ async function fetchCookies(): Promise<DotcomCookies> {
console.error('Error storing cookies in local storage:', e)
}
return data
})
.catch((err) => {
} catch (err) {
console.error('Error fetching cookies:', err)
// On failure, return default values.
const defaultCookies: DotcomCookies = {
@@ -70,11 +68,11 @@ async function fetchCookies(): Promise<DotcomCookies> {
}
cachedCookies = defaultCookies
return defaultCookies
})
.finally(() => {
} finally {
// Clear the in-flight promise regardless of success or failure.
inFlightPromise = null
})
}
})()
return inFlightPromise
}

View File

@@ -82,8 +82,12 @@ const asyncMiddleware =
<TReq extends Request = Request, T = void>(
fn: (req: TReq, res: Response, next: NextFunction) => T | Promise<T>,
) =>
(req: Request, res: Response, next: NextFunction) => {
Promise.resolve(fn(req as TReq, res, next)).catch(next)
async (req: Request, res: Response, next: NextFunction) => {
try {
await fn(req as TReq, res, next)
} catch (error) {
next(error)
}
}
export default function index(app: Express) {

View File

@@ -96,21 +96,23 @@ async function main() {
.listen(port, async () => {
console.log(`started server on ${host}`)
await scrape({
urls,
urlFilter: (url: string) => {
// Do not download assets from other hosts like S3 or octodex.github.com
// (this will keep them as remote references in the downloaded pages)
return url.startsWith(`http://localhost:${port}/`)
},
directory: tmpArchivalDirectory,
filenameGenerator: 'bySiteStructure',
requestConcurrency: 6,
plugins: [new RewriteAssetPathsPlugin(tmpArchivalDirectory, localDev, GH_PAGES_URL)],
}).catch((err: Error) => {
try {
await scrape({
urls,
urlFilter: (url: string) => {
// Do not download assets from other hosts like S3 or octodex.github.com
// (this will keep them as remote references in the downloaded pages)
return url.startsWith(`http://localhost:${port}/`)
},
directory: tmpArchivalDirectory,
filenameGenerator: 'bySiteStructure',
requestConcurrency: 6,
plugins: [new RewriteAssetPathsPlugin(tmpArchivalDirectory, localDev, GH_PAGES_URL)],
})
} catch (err) {
console.error('scraping error')
console.error(err)
})
}
fs.renameSync(
path.join(tmpArchivalDirectory, `/localhost_${port}`),

View File

@@ -68,7 +68,9 @@ async function main(): Promise<void> {
`)
}
main().catch((error) => {
try {
await main()
} catch (error) {
console.error('Error:', error)
process.exit(1)
})
}

View File

@@ -248,16 +248,19 @@ function popoverWrap(element: HTMLLinkElement, filledCallback?: (popover: HTMLDi
const { pathname } = new URL(element.href)
fetch(`/api/article/meta?${new URLSearchParams({ pathname })}`, {
headers: {
'X-Request-Source': 'hovercards',
},
}).then(async (response) => {
async function fetchAndFillPopover() {
const response = await fetch(`/api/article/meta?${new URLSearchParams({ pathname })}`, {
headers: {
'X-Request-Source': 'hovercards',
},
})
if (response.ok) {
const meta = (await response.json()) as PageMetadata
fillPopover(element, meta, filledCallback)
}
})
}
fetchAndFillPopover()
}
function fillPopover(

View File

@@ -128,10 +128,12 @@ async function limitConcurrency<T, R>(
const executing = new Set<Promise<R>>()
for (const item of items) {
const promise = asyncFn(item).then((result) => {
const createPromise = async () => {
const result = await asyncFn(item)
executing.delete(promise)
return result
})
}
const promise = createPromise()
results.push(promise)
executing.add(promise)

View File

@@ -39,21 +39,6 @@ interface CliOptions {
allVersions?: boolean
}
interface QueryResults {
views?: string
viewsDocset?: string
users?: string
usersDocset?: string
viewDuration?: string
viewDurationDocset?: string
bounces?: string
bouncesDocset?: string
score?: string
scoreDocset?: string
exits?: string
exitsDocset?: string
}
interface JsonOutput {
daysRange: string
startDate: string
@@ -222,130 +207,8 @@ async function main(): Promise<void> {
console.log(`\n\nSkipping comparison, since '${cleanPath}' is already a docset.\n`)
}
// Create query promises for all requested metrics
const queryPromises: Promise<void>[] = []
const results: QueryResults = {}
// Setup all the promises for parallel execution
if (options.views) {
const queryType = 'views'
queryPromises.push(
getViews(queryPaths, client, dates, version, options.verbose, queryType).then((data) => {
results.views = data
}),
)
if (options.showDocset) {
const queryType = 'docset views'
queryPromises.push(
getViews(docsetPath, client, dates, version, options.verbose, queryType).then((data) => {
results.viewsDocset = data
}),
)
}
}
if (options.users) {
const queryType = 'users'
queryPromises.push(
getUsers(queryPaths, client, dates, version, options.verbose, queryType).then((data) => {
results.users = data
}),
)
if (options.showDocset) {
const queryType = 'docset users'
queryPromises.push(
getUsers(docsetPath, client, dates, version, options.verbose, queryType).then((data) => {
results.usersDocset = data
}),
)
}
}
if (options.viewDuration) {
const queryType = 'view duration'
queryPromises.push(
getViewDuration(queryPaths, client, dates, version, options.verbose, queryType).then(
(data) => {
results.viewDuration = data
},
),
)
if (options.showDocset) {
const queryType = 'docset view duration'
queryPromises.push(
getViewDuration(docsetPath, client, dates, version, options.verbose, queryType).then(
(data) => {
results.viewDurationDocset = data
},
),
)
}
}
if (options.bounces) {
const queryType = 'bounces'
queryPromises.push(
getBounces(queryPaths, client, dates, version, options.verbose, queryType).then((data) => {
results.bounces = data
}),
)
if (options.showDocset) {
const queryType = 'docset bounces'
queryPromises.push(
getBounces(docsetPath, client, dates, version, options.verbose, queryType).then(
(data) => {
results.bouncesDocset = data
},
),
)
}
}
if (options.score) {
const queryType = 'score'
queryPromises.push(
getScore(queryPaths, client, dates, version, options.verbose, queryType).then((data) => {
results.score = data
}),
)
if (options.showDocset) {
const queryType = 'docset score'
queryPromises.push(
getScore(docsetPath, client, dates, version, options.verbose, queryType).then((data) => {
results.scoreDocset = data
}),
)
}
}
if (options.exits) {
const queryType = 'exits'
queryPromises.push(
getExitsToSupport(queryPaths, client, dates, version, options.verbose, queryType).then(
(data) => {
results.exits = data
},
),
)
if (options.showDocset) {
const queryType = 'docset exits'
queryPromises.push(
getExitsToSupport(docsetPath, client, dates, version, options.verbose, queryType).then(
(data) => {
results.exitsDocset = data
},
),
)
}
}
// Execute all queries in parallel
await Promise.all(queryPromises)
spinner.succeed('Data retrieved successfully!\n')
// Extract all results from the results object
const {
// Execute all queries in parallel and destructure results
const [
views,
viewsDocset,
users,
@@ -358,7 +221,53 @@ async function main(): Promise<void> {
scoreDocset,
exits,
exitsDocset,
} = results
] = await Promise.all([
options.views
? getViews(queryPaths, client, dates, version, options.verbose, 'views')
: undefined,
options.views && options.showDocset
? getViews(docsetPath, client, dates, version, options.verbose, 'docset views')
: undefined,
options.users
? getUsers(queryPaths, client, dates, version, options.verbose, 'users')
: undefined,
options.users && options.showDocset
? getUsers(docsetPath, client, dates, version, options.verbose, 'docset users')
: undefined,
options.viewDuration
? getViewDuration(queryPaths, client, dates, version, options.verbose, 'view duration')
: undefined,
options.viewDuration && options.showDocset
? getViewDuration(
docsetPath,
client,
dates,
version,
options.verbose,
'docset view duration',
)
: undefined,
options.bounces
? getBounces(queryPaths, client, dates, version, options.verbose, 'bounces')
: undefined,
options.bounces && options.showDocset
? getBounces(docsetPath, client, dates, version, options.verbose, 'docset bounces')
: undefined,
options.score
? getScore(queryPaths, client, dates, version, options.verbose, 'score')
: undefined,
options.score && options.showDocset
? getScore(docsetPath, client, dates, version, options.verbose, 'docset score')
: undefined,
options.exits
? getExitsToSupport(queryPaths, client, dates, version, options.verbose, 'exits')
: undefined,
options.exits && options.showDocset
? getExitsToSupport(docsetPath, client, dates, version, options.verbose, 'docset exits')
: undefined,
])
spinner.succeed('Data retrieved successfully!\n')
// Output JSON and exit
if (options.json) {
@@ -491,11 +400,13 @@ async function main(): Promise<void> {
}
}
main().catch((error) => {
try {
await main()
} catch (error) {
console.error(red('Unexpected error:'))
console.error(error)
process.exit(1)
})
}
/* -------- UTILITY FUNCTIONS -------- */

View File

@@ -4,5 +4,11 @@ import type { NextFunction } from 'express'
// This matches the original JavaScript behavior while providing some type safety
// The assertion is necessary because Express middleware can have various request/response types
export default function catchMiddlewareError(fn: any) {
return (req: any, res: any, next: NextFunction) => Promise.resolve(fn(req, res, next)).catch(next)
return async (req: any, res: any, next: NextFunction) => {
try {
await fn(req, res, next)
} catch (error) {
next(error)
}
}
}

View File

@@ -72,10 +72,12 @@ program
const options = program.opts<Options>()
const args: string[] = program.args
main(options, args).catch((err) => {
try {
await main(options, args)
} catch (err) {
console.error(chalk.red('Error:'), err)
process.exit(1)
})
}
async function main(opts: Options, args: string[]): Promise<void> {
const texts = [args.join(' ')]

View File

@@ -173,60 +173,59 @@ export default async function buildRecords(
})
// Wait for 'done' event but ignore 'error' events (they're handled by the error listener above)
return eventToPromise(waiter, 'done', { ignoreErrors: true }).then(() => {
console.log('\nrecords in index: ', records.length)
await eventToPromise(waiter, 'done', { ignoreErrors: true })
console.log('\nrecords in index: ', records.length)
// Report failed pages if any
if (failedPages.length > 0) {
const failureCount = failedPages.length
const header = chalk.bold.red(`${failureCount} page(s) failed to scrape\n\n`)
// Report failed pages if any
if (failedPages.length > 0) {
const failureCount = failedPages.length
const header = chalk.bold.red(`${failureCount} page(s) failed to scrape\n\n`)
const failureList = failedPages
.slice(0, 10) // Show first 10 failures
.map((failure, idx) => {
const number = chalk.gray(`${idx + 1}. `)
const errorType = chalk.yellow(failure.errorType)
const pathLine = failure.relativePath
? `\n${chalk.cyan(' Path: ')}${failure.relativePath}`
: ''
const urlLine = failure.url ? `\n${chalk.cyan(' URL: ')}${failure.url}` : ''
const errorLine = `\n${chalk.gray(` Error: ${failure.error}`)}`
const failureList = failedPages
.slice(0, 10) // Show first 10 failures
.map((failure, idx) => {
const number = chalk.gray(`${idx + 1}. `)
const errorType = chalk.yellow(failure.errorType)
const pathLine = failure.relativePath
? `\n${chalk.cyan(' Path: ')}${failure.relativePath}`
: ''
const urlLine = failure.url ? `\n${chalk.cyan(' URL: ')}${failure.url}` : ''
const errorLine = `\n${chalk.gray(` Error: ${failure.error}`)}`
return `${number}${errorType}${pathLine}${urlLine}${errorLine}`
})
.join('\n\n')
const remaining =
failureCount > 10 ? `\n\n${chalk.gray(`... and ${failureCount - 10} more`)}` : ''
const boxContent = header + failureList + remaining
const box = boxen(boxContent, {
title: chalk.red('⚠ Failed Pages'),
padding: 1,
borderColor: 'yellow',
return `${number}${errorType}${pathLine}${urlLine}${errorLine}`
})
.join('\n\n')
console.log(`\n${box}\n`)
const remaining =
failureCount > 10 ? `\n\n${chalk.gray(`... and ${failureCount - 10} more`)}` : ''
// Log suggestion
const boxContent = header + failureList + remaining
const box = boxen(boxContent, {
title: chalk.red('⚠ Failed Pages'),
padding: 1,
borderColor: 'yellow',
})
console.log(`\n${box}\n`)
// Log suggestion
console.log(
chalk.yellow(
`💡 Tip: These failures won't stop the scraping process. The script will continue with the remaining pages.`,
),
)
if (failedPages.some((f) => f.errorType === 'Timeout')) {
console.log(
chalk.yellow(
`💡 Tip: These failures won't stop the scraping process. The script will continue with the remaining pages.`,
chalk.gray(
` For timeout errors, try: export BUILD_RECORDS_MAX_CONCURRENT=50 (currently ${MAX_CONCURRENT})`,
),
)
if (failedPages.some((f) => f.errorType === 'Timeout')) {
console.log(
chalk.gray(
` For timeout errors, try: export BUILD_RECORDS_MAX_CONCURRENT=50 (currently ${MAX_CONCURRENT})`,
),
)
}
}
}
return {
records,
failedPages,
}
})
return {
records,
failedPages,
}
}

View File

@@ -52,12 +52,16 @@ export default function domwaiter(pages: Permalink[], opts: DomWaiterOptions = {
const limiter = new Bottleneck(opts)
pages.forEach((page) => {
limiter
.schedule(() => getPage(page, emitter, opts))
.catch((err) => {
async function schedulePage() {
try {
await limiter.schedule(() => getPage(page, emitter, opts))
} catch (err) {
// Catch any unhandled promise rejections
emitter.emit('error', err)
})
}
}
schedulePage()
})
limiter.on('idle', () => {

View File

@@ -117,15 +117,13 @@ async function main() {
}
}
main().then(
() => {
console.log('Done!')
},
(err) => {
console.error(err)
process.exit(1)
},
)
try {
await main()
console.log('Done!')
} catch (err) {
console.error(err)
process.exit(1)
}
// Convenience function to help with readability by removing this large but unneeded property.
// Using any for token objects as liquidjs doesn't provide TypeScript types

View File

@@ -635,7 +635,9 @@ function generateReport(results: PageReadability[]): string {
return report
}
main().catch((error) => {
try {
await main()
} catch (error) {
console.error('Readability analysis failed:', error)
process.exit(1)
})
}

View File

@@ -225,7 +225,9 @@ async function run() {
return newItemIDs
}
run().catch((error) => {
try {
await run()
} catch (error) {
console.log(`#ERROR# ${error}`)
process.exit(1)
})
}

View File

@@ -261,7 +261,9 @@ async function run() {
export { run }
run().catch((error) => {
try {
await run()
} catch (error) {
console.log(`#ERROR# ${error}`)
process.exit(1)
})
}