forked from github/docs
-
Notifications
You must be signed in to change notification settings - Fork 0
/
purge-redis-pages.js
117 lines (94 loc) · 3.8 KB
/
purge-redis-pages.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
#!/usr/bin/env node
// [start-readme]
//
// Run this script to manually purge the Redis rendered page cache.
// This will typically only be run by Heroku during the deployment process,
// as triggered via our Procfile's "release" phase configuration.
//
// [end-readme]
const Redis = require('ioredis')

// Configuration is pulled entirely from the environment.
const { REDIS_URL, HEROKU_RELEASE_VERSION, HEROKU_PRODUCTION_APP } = process.env
const isHerokuProd = HEROKU_PRODUCTION_APP === 'true'

// The rendered-page cache lives in Redis logical database 1.
const pageCacheDatabaseNumber = 1
// When a release version is known, keys are version-namespaced ("<version>:rp:*"),
// so scan across all versions; otherwise match the bare "rp:*" prefix.
const keyScanningPattern = HEROKU_RELEASE_VERSION ? '*:rp:*' : 'rp:*'
// Hint to SCAN for how many keys to fetch per cursor iteration.
const scanSetSize = 250
const startTime = Date.now()
const expirationDuration = 30 * 60 * 1000 // 30 minutes
const expirationTimestamp = startTime + expirationDuration // 30 minutes from now

// print keys to be purged without actually purging
const cliFlag = process.argv[2]
const dryRun = cliFlag === '-d' || cliFlag === '--dry-run'

// verify environment variables
if (!REDIS_URL) {
  if (isHerokuProd) {
    // Production must always have a cache to purge — treat absence as fatal.
    console.error('Error: you must specify the REDIS_URL environment variable.\n')
    process.exit(1)
  }
  // Outside production this script is a harmless no-op.
  console.warn('Warning: you did not specify a REDIS_URL environment variable. Exiting...\n')
  process.exit(0)
}

console.log({
  HEROKU_RELEASE_VERSION,
  HEROKU_PRODUCTION_APP
})

purgeRenderedPageCache()
/**
 * Scan the rendered-page cache database and set a short TTL (30 minutes from
 * script start) on every cached page key that either has no TTL or a longer
 * one, skipping keys that belong to the current HEROKU_RELEASE_VERSION.
 * Exits the process when the scan completes (0) or on any failure (1).
 */
function purgeRenderedPageCache () {
  const redisClient = new Redis(REDIS_URL, { db: pageCacheDatabaseNumber })

  let totalKeyCount = 0
  let iteration = 0

  // Create a readable stream (object mode) for the SCAN cursor
  const scanStream = redisClient.scanStream({
    match: keyScanningPattern,
    count: scanSetSize
  })

  scanStream.on('end', function () {
    console.log(`Done purging keys; affected total: ${totalKeyCount}`)
    console.log(`Time elapsed: ${Date.now() - startTime} ms`)
    // This seems to be unexpectedly necessary
    process.exit(0)
  })

  scanStream.on('error', function (error) {
    console.error('An unexpected error occurred!\n' + error.stack)
    console.error('\nAborting...')
    process.exit(1)
  })

  scanStream.on('data', async function (keys) {
    // BUGFIX: this handler is async, so a rejection inside it (e.g. a failed
    // pipeline.exec()) would previously become an unhandled promise rejection.
    // The stream's 'error' listener never sees it, the stream stays paused,
    // and the process hangs. Catch here and fail the same way stream errors do.
    try {
      console.log(`[Iteration ${iteration++}] Received ${keys.length} keys...`)

      // NOTE: It is possible for a SCAN cursor iteration to return 0 keys when
      // using a MATCH because it is applied after the elements are retrieved
      if (keys.length === 0) return

      if (dryRun) {
        console.log(`DRY RUN! This iteration might have set TTL for up to ${keys.length} keys:\n - ${keys.join('\n - ')}`)
        return
      }

      // Pause the SCAN stream while we set a TTL on these keys; this also
      // prevents 'end' from firing (and exiting) before this batch finishes.
      scanStream.pause()

      // Find existing TTLs to ensure we aren't extending the TTL if it's already set
      // PTTL mykey // only operate on -1 result values or those greater than ONE_HOUR_FROM_NOW
      const pttlPipeline = redisClient.pipeline()
      keys.forEach(key => pttlPipeline.pttl(key))
      const pttlResults = await pttlPipeline.exec()

      // Update pertinent keys to have TTLs set
      let updatingKeyCount = 0
      const pexpireAtPipeline = redisClient.pipeline()
      keys.forEach((key, i) => {
        const [error, pttl] = pttlResults[i]
        // PTTL of -1 means "no expiration set"; anything beyond our window gets shortened.
        const needsShortenedTtl = error == null && (pttl === -1 || pttl > expirationDuration)
        // Keys written by the release currently being deployed must survive.
        const isOldKey = !HEROKU_RELEASE_VERSION || !key.startsWith(`${HEROKU_RELEASE_VERSION}:`)
        if (needsShortenedTtl && isOldKey) {
          pexpireAtPipeline.pexpireat(key, expirationTimestamp)
          updatingKeyCount += 1
        }
      })

      // Only update TTLs if there are records worth updating
      if (updatingKeyCount > 0) {
        // Set all the TTLs
        const pexpireAtResults = await pexpireAtPipeline.exec()
        // Count only the entries whose TTLs were successfully updated
        // (PEXPIREAT returns 1 on success, 0 if the key no longer exists)
        const updatedResults = pexpireAtResults.filter(([error, result]) => error == null && result === 1)
        totalKeyCount += updatedResults.length
      }

      // Resume the SCAN stream
      scanStream.resume()
    } catch (error) {
      console.error('An unexpected error occurred!\n' + error.stack)
      console.error('\nAborting...')
      process.exit(1)
    }
  })
}