
Branch was updated using the 'autoupdate branch' Actions workflow.

Octomerger Bot
2021-01-08 06:15:47 +10:00
committed by GitHub
3 changed files with 120 additions and 1 deletion

Procfile

@@ -1 +1,3 @@
web: NODE_ENV=production node server.js
+release: NODE_ENV=production node script/purge-redis-pages.js


@@ -5,7 +5,7 @@ sections:
- heading: GitHub Actions Beta
notes:
- |
-[GitHub Actions](https://github.com/features/actions) is a powerful, flexible solution for CI/CD and workflow automation. GitHub Actions on Enteprise Server includes tools to help you manage the service, including key metrics in the Management Console, audit logs and access controls to help you control the roll out.
+[GitHub Actions](https://github.com/features/actions) is a powerful, flexible solution for CI/CD and workflow automation. GitHub Actions on Enterprise Server includes tools to help you manage the service, including key metrics in the Management Console, audit logs and access controls to help you control the roll out.
You will need to provide your own [storage](https://docs.github.com/en/enterprise/2.22/admin/github-actions/enabling-github-actions-and-configuring-storage) and runners for GitHub Actions. AWS S3, Azure Blob Storage and MinIO are supported. Please review the [updated minimum requirements for your platform](https://docs.github.com/en/enterprise/2.22/admin/installation/setting-up-a-github-enterprise-server-instance) before you turn on GitHub Actions. To learn more, contact the GitHub Sales team or [sign up for the beta](https://resources.github.com/beta-signup/). {% comment %} https://github.com/github/releases/issues/775 {% endcomment %}

script/purge-redis-pages.js (new file, 117 lines)

@@ -0,0 +1,117 @@
#!/usr/bin/env node
// [start-readme]
//
// Run this script to manually purge the Redis rendered page cache.
// This will typically only be run by Heroku during the deployment process,
// as triggered via our Procfile's "release" phase configuration.
//
// [end-readme]

const Redis = require('ioredis')
const { REDIS_URL, HEROKU_RELEASE_VERSION, HEROKU_PRODUCTION_APP } = process.env
const isHerokuProd = HEROKU_PRODUCTION_APP === 'true'
const pageCacheDatabaseNumber = 1
const keyScanningPattern = HEROKU_RELEASE_VERSION ? '*:rp:*' : 'rp:*'
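// When a release version is present, cache keys are namespaced as "<HEROKU_RELEASE_VERSION>:rp:<...>"
// ("rp" presumably standing for "rendered page"), so match across every version; otherwise keys are plain "rp:<...>"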
const scanSetSize = 250
const startTime = Date.now()
const expirationDuration = 30 * 60 * 1000 // 30 minutes
const expirationTimestamp = startTime + expirationDuration // 30 minutes from now
// print keys to be purged without actually purging
const dryRun = ['-d', '--dry-run'].includes(process.argv[2])
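// e.g. run locally with: REDIS_URL=redis://localhost:6379 node script/purge-redis-pages.js --dry-run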
// verify environment variables
if (!REDIS_URL) {
if (isHerokuProd) {
console.error('Error: you must specify the REDIS_URL environment variable.\n')
process.exit(1)
} else {
console.warn('Warning: you did not specify a REDIS_URL environment variable. Exiting...\n')
process.exit(0)
}
}
console.log({
HEROKU_RELEASE_VERSION,
HEROKU_PRODUCTION_APP
})
purgeRenderedPageCache()

function purgeRenderedPageCache () {
const redisClient = new Redis(REDIS_URL, { db: pageCacheDatabaseNumber })
let totalKeyCount = 0
let iteration = 0
// Create a readable stream (object mode) for the SCAN cursor
const scanStream = redisClient.scanStream({
match: keyScanningPattern,
count: scanSetSize
})
scanStream.on('end', function () {
console.log(`Done purging keys; affected total: ${totalKeyCount}`)
console.log(`Time elapsed: ${Date.now() - startTime} ms`)
// This seems to be unexpectedly necessary
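// (most likely because the open Redis connection keeps the Node event loop alive; redisClient.quit() would be the graceful alternative)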
process.exit(0)
})
scanStream.on('error', function (error) {
console.error('An unexpected error occurred!\n' + error.stack)
console.error('\nAborting...')
process.exit(1)
})
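// Each 'data' event delivers an array of matching keys; note that COUNT is only a hint to Redis
// about how much work each SCAN iteration does, not a guaranteed batch size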
scanStream.on('data', async function (keys) {
console.log(`[Iteration ${iteration++}] Received ${keys.length} keys...`)
// NOTE: It is possible for a SCAN cursor iteration to return 0 keys when
// using a MATCH because it is applied after the elements are retrieved
if (keys.length === 0) return
if (dryRun) {
console.log(`DRY RUN! This iteration would have set a TTL on up to ${keys.length} keys:\n - ${keys.join('\n - ')}`)
return
}
// Pause the SCAN stream while we set a TTL on these keys
scanStream.pause()
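// (otherwise the stream would keep emitting batches while the pipeline work below is still awaiting)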
// Find the existing TTLs to make sure we don't extend a TTL that is already set
// (via PTTL: only operate on keys with no TTL (-1) or a TTL longer than our 30-minute expirationDuration)
const pttlPipeline = redisClient.pipeline()
keys.forEach(key => pttlPipeline.pttl(key))
const pttlResults = await pttlPipeline.exec()
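// Each pipeline result is an [error, value] pair; PTTL returns -1 for a key with no TTL,
// -2 for a key that no longer exists, and otherwise the remaining TTL in milliseconds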
// Update pertinent keys to have TTLs set
let updatingKeyCount = 0
const pexpireAtPipeline = redisClient.pipeline()
keys.forEach((key, i) => {
const [error, pttl] = pttlResults[i]
const needsShortenedTtl = error == null && (pttl === -1 || pttl > expirationDuration)
const isOldKey = !HEROKU_RELEASE_VERSION || !key.startsWith(`${HEROKU_RELEASE_VERSION}:`)
if (needsShortenedTtl && isOldKey) {
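// PEXPIREAT takes an absolute Unix timestamp in milliseconds (unlike PEXPIRE's relative duration)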
pexpireAtPipeline.pexpireat(key, expirationTimestamp)
updatingKeyCount += 1
}
})
// Only update TTLs if there are records worth updating
if (updatingKeyCount > 0) {
// Set all the TTLs
const pexpireAtResults = await pexpireAtPipeline.exec()
const updatedResults = pexpireAtResults.filter(([error, result]) => error == null && result === 1)
// Count only the entries whose TTLs were successfully updated
totalKeyCount += updatedResults.length
}
// Resume the SCAN stream
scanStream.resume()
})
}
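
For anyone who wants to verify a purge by hand, a minimal sketch along these lines could work. It is not part of this commit; it assumes the same REDIS_URL and page-cache database number used above, and the spotCheck name is chosen here purely for illustration.

const Redis = require('ioredis')

async function spotCheck () {
  // Connect to the same rendered-page cache database the purge script targets
  const redis = new Redis(process.env.REDIS_URL, { db: 1 })
  // Mirror the script's key pattern; KEYS is fine for a quick manual check,
  // but prefer SCAN (as the script does) against large production datasets
  const pattern = process.env.HEROKU_RELEASE_VERSION ? '*:rp:*' : 'rp:*'
  const keys = await redis.keys(pattern)
  for (const key of keys.slice(0, 10)) {
    const pttl = await redis.pttl(key)
    console.log(`${key} -> ${pttl === -1 ? 'no TTL set' : `expires in ${pttl} ms`}`)
  }
  redis.quit()
}

spotCheck()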