boot.ts (forked from Darkle/Roffline)
44 lines (36 loc) · 1.27 KB
import dnscache from 'dns-cache'
import * as R from 'ramda'
import RA from 'ramda-adjunct'
import { startServer } from './server/server'
import { mainLogger } from './logging/logging'
import { db } from './db/db'
import { ensurePostsMediaDownloadFolderExists } from './server/utils'
import { scheduleUpdates } from './downloads/update-scheduler'
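
// Log the fatal error to the console and to the main logger (the tryCatch/noop
// wrapper stops a failing logger call from masking the original error), close
// the db connection, then exit with a non-zero code.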
function bailOnFatalError(err: Error): void {
  console.error(err)
  R.tryCatch(mainLogger.fatal, RA.noop)(err)
  db.close()
    .catch(RA.noop)
    .finally(() => {
      // Wait for file IO from the mainLogger.fatal() call above to finish.
      setImmediate(_ => process.exit(1))
    })
}
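
// Treat unhandled promise rejections and uncaught exceptions as fatal errors.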
process.on('unhandledRejection', bailOnFatalError)
process.on('uncaughtException', bailOnFatalError)
/*****
We need to cache DNS requests as we sometimes make thousands of fetch requests
in a short timespan. Without this we used to get error messages like:
`FetchError: request to https://www.reddit.com/comments/qxy9ae.json failed, reason: getaddrinfo ENOTFOUND www.reddit.com`
*****/
const thirtyMinutesInMs = 1800000
dnscache(thirtyMinutesInMs)
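
// Boot sequence: initialise the db, make sure the posts media download folder
// exists, start the web server, then schedule the periodic update downloads.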
db.init()
  .then(ensurePostsMediaDownloadFolderExists)
  .then(startServer)
  .then(scheduleUpdates)
  .catch(err => {
    console.error(err)
    mainLogger.fatal(err)
    process.exit(1)
  })