@@ -1,7 +1,4 @@
-import assert from 'assert'
-import fs from 'fs/promises'
 import path from 'path'
-import { isPromise } from 'util/types'
 import { fileURLToPath } from 'url'
 
 import { readCompressedJsonFileFallback } from '../read-json-file.js'
@@ -9,51 +6,13 @@ import getExceptionRedirects from './exception-redirects.js'
 
 import { latest } from '../enterprise-server-releases.js'
 
-function diskMemoize(filePath, asyncFn, maxAgeSeconds = 60 * 60) {
-  // The logging that the disk memoizer does is pretty useful to humans,
-  // but it's only really useful when you're running `npm run dev` or
-  // something.
-  const log = (...args) => {
-    if (process.env.NODE_ENV === 'development') console.log(...args)
-  }
-  return async (...args) => {
-    try {
-      const stats = await fs.stat(filePath)
-      const ageSeconds = (new Date().getTime() - stats.mtime.getTime()) / 1000
-      if (ageSeconds < maxAgeSeconds) {
-        const value = JSON.parse(await fs.readFile(filePath, 'utf-8'))
-        log(`Redirects disk-cache HIT on ${filePath}`)
-        return value
-      }
-      log(`Redirects disk-cache ${filePath} too old`)
-    } catch (err) {
-      if (err instanceof SyntaxError) {
-        console.warn(`Syntax error when trying to JSON parse the ${filePath}`, err)
-      } else if (err.code !== 'ENOENT') throw err
-    }
-    log(`Redirects disk-cache MISS on ${filePath}`)
-    const promise = asyncFn(...args)
-    assert(isPromise(promise), "memoized function didn't return a promise")
-    return promise.then(async (value) => {
-      await fs.writeFile(
-        filePath,
-        JSON.stringify(value, undefined, process.env.NODE_ENV === 'development' ? 2 : 0),
-        'utf-8'
-      )
-
-      return value
-    })
-  }
-}
-
 const __dirname = path.dirname(fileURLToPath(import.meta.url))
-const DISK_CACHE_FILEPATH = path.join(__dirname, '.redirects-cache.json')
 
 const EXCEPTIONS_FILE = path.join(__dirname, './static/redirect-exceptions.txt')
 
 // This function runs at server warmup and precompiles possible redirect routes.
 // It outputs them in key-value pairs within a neat Javascript object: { oldPath: newPath }
-const precompileRedirects = diskMemoize(DISK_CACHE_FILEPATH, async (pageList) => {
+async function precompileRedirects(pageList) {
   const allRedirects = readCompressedJsonFileFallback('./lib/redirects/static/developer.json')
 
   const externalRedirects = readCompressedJsonFileFallback('./lib/redirects/external-sites.json')
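For orientation: the helper deleted above is a small disk-backed memoizer for async functions. The wrapper it returns serves a JSON-parsed copy of `filePath` while the file is younger than `maxAgeSeconds`; otherwise it calls `asyncFn`, writes the result to disk, and returns it. A minimal usage sketch (the wrapped function and cache path below are hypothetical, not taken from this change, and assume `diskMemoize` from the hunk above is in scope):

```js
// Hypothetical use of the removed diskMemoize helper: wrap an expensive async
// step whose result is JSON-serializable.
async function buildRedirectsSlowly(pageList) {
  // ...expensive work; must resolve to something JSON.stringify can handle...
  return { '/old/path': '/new/path' }
}

// Cache the result in .example-cache.json and reuse it for up to five minutes.
const cachedBuild = diskMemoize('.example-cache.json', buildRedirectsSlowly, 5 * 60)

const first = await cachedBuild([]) // computes and writes the cache file
const again = await cachedBuild([]) // within 5 minutes, reads the cached JSON instead
```

With the wrapper removed, `precompileRedirects` always recomputes at warmup instead of possibly reading a stale `.redirects-cache.json` from disk.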
@@ -102,5 +61,6 @@ const precompileRedirects = diskMemoize(DISK_CACHE_FILEPATH, async (pageList) =>
   })
 
   return allRedirects
-})
+}
+
 export default precompileRedirects
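As the comment in the hunk above says, `precompileRedirects` runs at server warmup and resolves to a flat `{ oldPath: newPath }` object. Now that the disk-cache wrapper is gone it is just a plain async function, so a caller awaits it once and then does ordinary object lookups. A rough consumption sketch (the import path and the empty `pageList` are placeholders, not from this diff):

```js
// Placeholder import path; the diff does not show where this module lives.
import precompileRedirects from './precompile.js'

const pageList = [] // stand-in for the real list of page objects built at warmup

const redirects = await precompileRedirects(pageList)

// Lookups are plain property reads on the { oldPath: newPath } map:
const target = redirects['/some/old/path'] // undefined when there is no redirect
```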
@@ -108,7 +108,10 @@ router.get(
     )}-${language}`
 
     const hits = []
-    const timed = statsd.asyncTimer(getSearchResults, 'api.search', ['version:legacy'])
+    const timed = statsd.asyncTimer(getSearchResults, 'api.search', [
+      'version:legacy',
+      `indexName:${indexName}`,
+    ])
     try {
       const searchResults = await timed({
         indexName,
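This hunk (and the matching one below for the v1 route) only changes the tag list passed to `statsd.asyncTimer`: adding an `indexName:${indexName}` tag lets the single `api.search` timing metric be broken down by which search index served the request. The sketch below is a bare-bones stand-in for that wrapper pattern, not the repo's actual statsd client; the index name and query are made up:

```js
// Minimal stand-in for an asyncTimer-style wrapper: time an async function and
// report the elapsed milliseconds under `stat`, annotated with `tags`.
function asyncTimer(fn, stat, tags = []) {
  return async (...args) => {
    const start = Date.now()
    try {
      return await fn(...args)
    } finally {
      console.log(`${stat}: ${Date.now() - start}ms [${tags.join(', ')}]`)
    }
  }
}

// Hypothetical search function standing in for getSearchResults.
async function getSearchResults({ indexName, query }) {
  return { indexName, query, hits: [] }
}

const indexName = 'github-docs-dotcom-en' // made-up example value
const timed = asyncTimer(getSearchResults, 'api.search', [
  'version:legacy',
  `indexName:${indexName}`, // the per-index tag this change adds
])
const searchResults = await timed({ indexName, query: 'actions' })
```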
@@ -244,7 +247,10 @@ router.get(
     // This measurement then combines both the Node-work and the total
     // network-work but we know that roughly 99.5% of the total time is
     // spent in the network-work time so this primarily measures that.
-    const timed = statsd.asyncTimer(getSearchResults, 'api.search', ['version:v1'])
+    const timed = statsd.asyncTimer(getSearchResults, 'api.search', [
+      'version:v1',
+      `indexName:${indexName}`,
+    ])
 
     try {
       const { meta, hits } = await timed({ indexName, query, page, size, debug, sort })