Skip to content

Commit

Permalink
fix: cache most things at process exit instead
Browse files Browse the repository at this point in the history
  • Loading branch information
TurtIeSocks committed Feb 8, 2024
1 parent de18e1e commit 56eb47e
Show file tree
Hide file tree
Showing 5 changed files with 95 additions and 27 deletions.
7 changes: 1 addition & 6 deletions packages/types/lib/general.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,9 @@ export type RMGeoJSON = {
features: RMFeature[]
}

import masterfile = require('packages/masterfile/lib/data/masterfile.json')
import { Config } from './config'
import { SliderProps } from '@mui/material'

export type Masterfile = typeof masterfile

export type Strategy = 'discord' | 'telegram' | 'local'

export type S2Polygon = [number, number][]
Expand Down Expand Up @@ -88,9 +85,7 @@ export interface UICONS {
egg: UiconImage[]
}
reward: {
[
key: Masterfile['questRewardTypes'][keyof Masterfile['questRewardTypes']]
]: UiconImage[]
[key: string]: UiconImage[]
}
spawnpoint: UiconImage[]
team: UiconImage[]
Expand Down
11 changes: 4 additions & 7 deletions server/src/services/DbCheck.js
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ const config = require('@rm/config')

const { log, HELPERS } = require('@rm/logger')
const { getBboxFromCenter } = require('./functions/getBbox')
const { setCache, getCache } = require('./cache')
const { getCache } = require('./cache')

const softLimit = config.getSafe('api.searchSoftKmLimit')
const hardLimit = config.getSafe('api.searchHardKmLimit')
Expand Down Expand Up @@ -248,7 +248,7 @@ module.exports = class DbCheck {
* @param {boolean} historical
* @returns {void}
*/
async setRarity(results, historical = false) {
setRarity(results, historical = false) {
const base = {}
const mapKey = historical ? 'historical' : 'rarity'
let total = 0
Expand Down Expand Up @@ -279,7 +279,6 @@ module.exports = class DbCheck {
this[mapKey][id] = 'common'
}
})
await setCache(`${mapKey}.json`, this[mapKey])
}

async historicalRarity() {
Expand All @@ -295,7 +294,7 @@ module.exports = class DbCheck {
.groupBy('pokemon_id'),
),
)
await this.setRarity(
this.setRarity(
results.map((result) =>
Object.fromEntries(
result.map((pkmn) => [`${pkmn.pokemon_id}`, +pkmn.total]),
Expand Down Expand Up @@ -595,10 +594,9 @@ module.exports = class DbCheck {
Object.values(titles),
]),
)
await setCache('questConditions.json', this.questConditions)
}
if (model === 'Pokemon') {
await this.setRarity(results, false)
this.setRarity(results, false)
}
if (results.length === 1) return results[0].available
if (results.length > 1) {
Expand Down Expand Up @@ -642,7 +640,6 @@ module.exports = class DbCheck {
...results.map((result) => result.max_duration),
)
log.info(HELPERS.db, 'Updating filter context for routes')
await setCache('filterContext.json', this.filterContext)
} catch (e) {
log.error(
HELPERS.db,
Expand Down
37 changes: 27 additions & 10 deletions server/src/services/areas.js
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,27 @@ const manualGeojson = {
}),
}

/**
 * Resolves a geojson source: remote (http) names are served from the
 * disk cache, anything else is read from the local configs directory.
 *
 * @param {string} fileName
 * @returns {import("@rm/types").RMGeoJSON}
 */
const loadFromFile = (fileName) => {
  try {
    if (fileName.startsWith('http')) {
      return getCache(fileName, DEFAULT_RETURN)
    }
    const filePath = resolve(__dirname, `../configs/${fileName}`)
    if (!fs.existsSync(filePath)) return DEFAULT_RETURN
    return JSON.parse(fs.readFileSync(filePath, 'utf-8'))
  } catch (e) {
    log.warn(HELPERS.areas, `Failed to load ${fileName} from file system`, e)
    return DEFAULT_RETURN
  }
}

/**
* @param {string} location
* @returns {Promise<import("@rm/types").RMGeoJSON>}
Expand All @@ -57,7 +78,7 @@ const getGeojson = async (location) => {
.then(async (res) => {
if (res?.data) {
log.info(HELPERS.areas, 'Caching', location, 'from Kōji')
await setCache(`${location.replace(/\//g, '__')}.json`, res.data)
await setCache(location, res.data)
return res.data
}
return DEFAULT_RETURN
Expand All @@ -68,7 +89,7 @@ const getGeojson = async (location) => {
'Failed to fetch Kōji geojson, attempting to read from backup',
err,
)
const cached = getCache(`${location.replace(/\//g, '__')}.json`)
const cached = getCache(location)
if (cached) {
log.info(HELPERS.areas, 'Reading from koji_backups for', location)
return cached
Expand All @@ -77,11 +98,7 @@ const getGeojson = async (location) => {
return DEFAULT_RETURN
})
}
if (fs.existsSync(resolve(__dirname, `../configs/${location}`))) {
return JSON.parse(
fs.readFileSync(resolve(__dirname, `../configs/${location}`), 'utf-8'),
)
}
return loadFromFile(location)
} catch (e) {
log.warn(HELPERS.areas, 'Issue with getting the geojson', e)
}
Expand Down Expand Up @@ -111,7 +128,7 @@ const loadScanPolygons = async (fileName, domain) => {
? `${f.properties.parent}-${f.properties.name}`
: f.properties.name,
center: /** @type {[number,number]} */ (
center(f).geometry.coordinates.reverse()
center(f).geometry.coordinates.slice().reverse()
),
},
})),
Expand Down Expand Up @@ -335,13 +352,13 @@ const loadCachedAreas = () => {

/** @type {Record<string, import("@rm/types").RMGeoJSON>} */
const scanAreas = {
main: getCache(`${fileName.replace(/\//g, '__')}.json`, DEFAULT_RETURN),
main: loadFromFile(fileName),
...Object.fromEntries(
config
.getSafe('multiDomains')
.map((d) => [
d.general?.geoJsonFileName ? d.domain.replaceAll('.', '_') : 'main',
getCache(d.general?.geoJsonFileName || fileName, DEFAULT_RETURN),
loadFromFile(d.general?.geoJsonFileName || fileName),
]),
),
}
Expand Down
15 changes: 11 additions & 4 deletions server/src/services/cache.js
Original file line number Diff line number Diff line change
@@ -1,17 +1,23 @@
// @ts-check
const fs = require('fs')
const path = require('path')

const { log, HELPERS } = require('@rm/logger')

const CACHE_DIR = path.join(__dirname, '../../.cache')

/**
 * Converts a cache key into a filesystem-safe file name. Remote (http)
 * keys have their slashes flattened to `__` and a `.json` suffix added;
 * local names pass through untouched.
 *
 * @param {string} str
 * @returns {string}
 */
const fsFriendlyName = (str) => {
  if (!str.startsWith('http')) return str
  return `${str.replaceAll('/', '__')}.json`
}

/**
* @template T
* @param {string} fileName
* @param {string} unsafeName
* @param {T} [fallback]
* @returns {T}
*/
const getCache = (fileName, fallback = null) => {
const getCache = (unsafeName, fallback = null) => {
const fileName = fsFriendlyName(unsafeName)
try {
if (!fs.existsSync(path.resolve(CACHE_DIR, fileName))) return fallback
const data = JSON.parse(
Expand All @@ -30,10 +36,11 @@ const getCache = (fileName, fallback = null) => {
}

/**
* @param {string} fileName
* @param {string} unsafeName
* @param {object | string} data
*/
const setCache = async (fileName, data) => {
const setCache = async (unsafeName, data) => {
const fileName = fsFriendlyName(unsafeName)
try {
if (!fs.existsSync(CACHE_DIR)) await fs.promises.mkdir(CACHE_DIR)
await fs.promises.writeFile(
Expand Down
52 changes: 52 additions & 0 deletions server/src/services/initialization.js
Original file line number Diff line number Diff line change
@@ -1,20 +1,72 @@
// @ts-check
const NodeCache = require('node-cache')

const config = require('@rm/config')
const { log, HELPERS } = require('@rm/logger')

const DbCheck = require('./DbCheck')
const EventManager = require('./EventManager')
const PvpWrapper = require('./PvpWrapper')
const { getCache, setCache } = require('./cache')

// Core service singletons shared across the server
const Db = new DbCheck()
// Only spin up the in-process PvP calculator when configured to handle it
const Pvp = config.getSafe('api.pvp.reactMapHandlesPvp')
  ? new PvpWrapper()
  : null
const Event = new EventManager()

// Per-user scan history; entries expire after 24 hours
const userCache = new NodeCache({ stdTTL: 60 * 60 * 24 })

// Re-hydrate the user cache persisted by the previous process run
for (const [k, v] of Object.entries(getCache('scanUserHistory.json', {}))) {
  userCache.set(k, v)
}

Event.setTimers(Db, Pvp)

/**
 * Flushes all in-memory state to the disk cache before the process exits.
 * Uses Promise.allSettled (rather than the fail-fast Promise.all) so that
 * one failed write cannot prevent the remaining caches from being persisted.
 *
 * @param {NodeJS.Signals} e - signal (or pseudo-signal) that triggered shutdown
 * @returns {Promise<void>}
 */
const onShutdown = async (e) => {
  log.info(HELPERS.ReactMap, 'received signal', e, 'writing cache...')
  // Snapshot the NodeCache contents into a plain serializable object
  const cacheObj = Object.fromEntries(
    userCache.keys().map((key) => [key, userCache.get(key)]),
  )
  const results = await Promise.allSettled([
    setCache('scanUserHistory.json', cacheObj),
    setCache('rarity.json', Db.rarity),
    setCache('historical.json', Db.historical),
    setCache('available.json', Event.available),
    setCache('filterContext.json', Db.filterContext),
    setCache('questConditions.json', Db.questConditions),
    setCache('uaudio.json', Event.uaudio),
    setCache('uicons.json', Event.uicons),
  ])
  results.forEach((result) => {
    if (result.status === 'rejected') {
      log.error(HELPERS.ReactMap, 'failed to write cache', result.reason)
    }
  })
  log.info(HELPERS.ReactMap, 'exiting...')
}

// Flush caches on each graceful shutdown signal, then exit cleanly.
// Deduplicates the previously copy-pasted handler for each signal.
for (const signal of ['SIGINT', 'SIGTERM', 'SIGUSR1', 'SIGUSR2']) {
  process.on(signal, async (e) => {
    await onShutdown(e)
    process.exit(0)
  })
}
process.on('uncaughtException', async (err) => {
  // Log the fatal error before flushing — it was previously dropped silently
  log.error(HELPERS.ReactMap, 'uncaught exception', err)
  await onShutdown('SIGBREAK')
  process.exit(99)
})

// Shared service singletons consumed throughout the server
module.exports = {
  Db,
  Pvp,
  Event,
  userCache,
}

0 comments on commit 56eb47e

Please sign in to comment.