@streamr/geoip-location 100.2.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json ADDED
@@ -0,0 +1,39 @@
1
+ {
2
+ "name": "@streamr/geoip-location",
3
+ "version": "100.2.4",
4
+ "description": "Library for getting location information from IP addresses based on MaxMind GeoLite2 databases",
5
+ "repository": {
6
+ "type": "git",
7
+ "url": "git+https://github.com/streamr-dev/network.git",
8
+ "directory": "packages/geoip-location"
9
+ },
10
+ "main": "dist/src/exports.js",
11
+ "browser": {
12
+ "dist/src/exports.js": false,
13
+ "dist/src/GeoIpLocator.js": false,
14
+ "dist/src/downloadGeoIpDatabase.js": false
15
+ },
16
+ "types": "dist/src/exports.d.ts",
17
+ "license": "Apache-2.0",
18
+ "author": "Streamr Network AG <contact@streamr.network>",
19
+ "scripts": {
20
+ "build": "tsc -b tsconfig.node.json",
21
+ "check": "tsc -p ./tsconfig.jest.json --noEmit",
22
+ "clean": "jest --clearCache || true; rm -rf dist *.tsbuildinfo node_modules/.cache || true",
23
+ "eslint": "eslint --cache --cache-location=node_modules/.cache/.eslintcache/ '*/**/*.{js,ts}'",
24
+ "test": "jest test/unit",
25
+ "test-unit": "jest test/unit"
26
+ },
27
+ "dependencies": {
28
+ "@streamr/utils": "100.2.4",
29
+ "long-timeout": "^0.1.1",
30
+ "mmdb-lib": "^2.1.0",
31
+ "tar": "^6.2.1",
32
+ "uuid": "^9.0.1"
33
+ },
34
+ "devDependencies": {
35
+ "@types/long-timeout": "^0.1.2",
36
+ "@types/tar": "^6.1.11",
37
+ "express": "^4.19.2"
38
+ }
39
+ }
@@ -0,0 +1,109 @@
1
+ import { Logger, filePathToNodeFormat } from '@streamr/utils'
2
+ import { CityResponse, Reader } from 'mmdb-lib'
3
+ import LongTimeout from 'long-timeout'
4
+ import { downloadGeoIpDatabase } from './downloadGeoIpDatabase'
5
+
6
const logger = new Logger(module)

// Result of a successful GeoIP lookup: WGS84 coordinates for the IP address
interface GeoIpLookupResult {
    latitude: number
    longitude: number
}

// Interval between periodic checks for an updated GeoIP database:
// 30 days in milliseconds
const DEFAULT_DB_CHECK_INTERVAL = 30 * 24 * 60 * 60 * 1000
// Retry interval used after a failed database check:
// 24 hours in milliseconds
const DEFAULT_DB_CHECK_ERROR_INTERVAL = 24 * 60 * 60 * 1000
17
+
18
+ export class GeoIpLocator {
19
+ private abortController: AbortController
20
+ private readonly geoIpDatabaseFolder: string
21
+ private readonly dbCheckInterval: number
22
+ private readonly dbCheckErrorInterval: number
23
+ private readonly mirrorUrl?: string
24
+ private reader?: Reader<CityResponse>
25
+ private dbCheckTimeout?: LongTimeout.Timeout
26
+
27
+ constructor(
28
+ geoIpDatabaseFolder: string,
29
+ dbCheckInterval = DEFAULT_DB_CHECK_INTERVAL,
30
+ dbCheckErrorInterval = DEFAULT_DB_CHECK_ERROR_INTERVAL,
31
+ mirrorUrl?: string
32
+ ) {
33
+ this.abortController = new AbortController()
34
+ this.dbCheckInterval = dbCheckInterval
35
+ this.dbCheckErrorInterval = dbCheckErrorInterval
36
+ if (!geoIpDatabaseFolder.endsWith('/')) {
37
+ geoIpDatabaseFolder += '/'
38
+ }
39
+ this.geoIpDatabaseFolder = filePathToNodeFormat(geoIpDatabaseFolder)
40
+ this.mirrorUrl = mirrorUrl
41
+ }
42
+
43
+ private checkDatabase: () => Promise<void> = async () => {
44
+ if (this.reader === undefined) {
45
+ // if we do not have a reader, create a new one in any case
46
+ this.reader = await downloadGeoIpDatabase(this.geoIpDatabaseFolder, true, this.abortController.signal, this.mirrorUrl)
47
+ } else {
48
+ // if we already have a reader, create a new one only if db has changed
49
+ const newReader = await downloadGeoIpDatabase(this.geoIpDatabaseFolder, false, this.abortController.signal, this.mirrorUrl)
50
+ if (newReader !== undefined) {
51
+ this.reader = newReader
52
+ }
53
+ }
54
+ }
55
+
56
+ private scheduleCheck: (timeout: number) => void = async (timeout: number) => {
57
+ if (this.abortController.signal.aborted) {
58
+ return
59
+ }
60
+ this.dbCheckTimeout = LongTimeout.setTimeout(async () => {
61
+ try {
62
+ await this.checkDatabase()
63
+ this.scheduleCheck(this.dbCheckInterval)
64
+ } catch (err) {
65
+ logger.warn('GeoIpLocator: GeoIP database check failed', { err })
66
+ this.scheduleCheck(this.dbCheckErrorInterval)
67
+ }
68
+ }, timeout)
69
+ }
70
+
71
+ async start(): Promise<void> {
72
+ if (this.dbCheckTimeout !== undefined) {
73
+ return
74
+ }
75
+ await this.checkDatabase()
76
+ this.scheduleCheck(this.dbCheckInterval)
77
+ }
78
+
79
+ stop(): void {
80
+ if (this.dbCheckTimeout !== undefined) {
81
+ LongTimeout.clearTimeout(this.dbCheckTimeout)
82
+ }
83
+ this.abortController.abort()
84
+ }
85
+
86
+ lookup(ip: string): GeoIpLookupResult | undefined {
87
+ if (this.reader === undefined) {
88
+ logger.warn('GeoIpLocator: lookup called before database is ready (maybe start() was not called?')
89
+ return undefined
90
+ }
91
+
92
+ // If ip is falsy, the library will crash
93
+ // this might happen despite the ts typings because the ip address
94
+ // comes from the ws server socket and is not under our control
95
+ if (!ip) {
96
+ return undefined
97
+ }
98
+
99
+ const result = this.reader.get(ip)
100
+ if (!result || !result.location || !result.location.latitude || !result.location.longitude) {
101
+ return undefined
102
+ } else {
103
+ return {
104
+ latitude: result.location.latitude,
105
+ longitude: result.location.longitude
106
+ }
107
+ }
108
+ }
109
+ }
@@ -0,0 +1,185 @@
1
+ import crypto from 'crypto'
2
+ import fs from 'fs'
3
+ import { CityResponse, Reader } from 'mmdb-lib'
4
+ import { extractFileFromTarStream } from './tarHelper'
5
+ import { v4 } from 'uuid'
6
+ import { Logger } from '@streamr/utils'
7
+
8
// Default mirror redistributing the MaxMind GeoLite2 databases
const GEOIP_MIRROR_URL = 'https://raw.githubusercontent.com/GitSquared/node-geolite2-redist/master/redist/'
const DB_NAME = 'GeoLite2-City'
// NOTE(review): 'SUFFFIX' is a typo ('SUFFIX'), but the name is referenced
// in downloadGeoIpDatabase() below, so it is left unchanged here
const TAR_SUFFFIX = '.tar.gz'
const DB_SUFFIX = '.mmdb'
const HASH_SUFFIX = '.mmdb.sha384'

const logger = new Logger(module)
15
+
16
+ const downloadNewDb = async (
17
+ url: string,
18
+ dbFolder: string,
19
+ remoteHash: string,
20
+ abortSignal: AbortSignal
21
+ ): Promise<void> => {
22
+ // make a unique name for the temporary download folder
23
+ // in case there are multiple downloads happening at the same time
24
+
25
+ const uniqueName = v4()
26
+ const downloadFolder = dbFolder + '.download' + uniqueName
27
+ const dbFileName = DB_NAME + DB_SUFFIX
28
+ const dbFileInDownloadFolder = downloadFolder + '/' + dbFileName
29
+ const dbFileInDbFolder = dbFolder + dbFileName
30
+
31
+ let response: Response
32
+
33
+ try {
34
+ logger.debug('Downloading GeoIP database from: ' + url)
35
+ response = await fetch(url, { keepalive: false, signal: abortSignal })
36
+ } catch (e) {
37
+ // Catching and re-throwing as async exception
38
+ // here is necessary, synch exceptions cannot be caught by the caller
39
+ throw new Error('Fetch error when downloading ' + url + ', error: ' + e)
40
+ }
41
+
42
+ if (!response.ok) {
43
+ throw new Error('HTTP error when downloading ' + url + ', status: ' + response.status)
44
+ }
45
+
46
+ // extract the tarball to a temporary folder
47
+
48
+ try {
49
+ fs.mkdirSync(downloadFolder, { recursive: true })
50
+ } catch (e) {
51
+ throw new Error('Error creating temporary folder ' + downloadFolder + ', error: ' + e)
52
+ }
53
+
54
+ try {
55
+ await extractFileFromTarStream(dbFileName, response.body!, downloadFolder)
56
+ } catch (e) {
57
+ try {
58
+ fs.rmSync(downloadFolder, { recursive: true })
59
+ } catch (e2) {
60
+ // ignore error when removing the temporary folder
61
+ }
62
+ throw e
63
+ }
64
+
65
+ // check the hash of the extracted file
66
+
67
+ if (!isDbFileValid(dbFileInDownloadFolder, remoteHash)) {
68
+ try {
69
+ fs.rmSync(downloadFolder, { recursive: true })
70
+ } catch (e2) {
71
+ // ignore error when removing the temporary folder
72
+ }
73
+ throw new Error('Downloaded database hash does not match the expected hash')
74
+ }
75
+
76
+ try {
77
+ // move the extracted file to the correct location
78
+ fs.renameSync(dbFileInDownloadFolder, dbFileInDbFolder)
79
+ } catch (e) {
80
+ throw new Error('Error moving ' + dbFileInDownloadFolder + ' to ' + dbFileInDbFolder + ', error: ' + e)
81
+ } finally {
82
+ try {
83
+ fs.rmSync(downloadFolder, { recursive: true })
84
+ } catch (e2) {
85
+ // ignore error when removing the temporary folder
86
+ }
87
+ }
88
+
89
+ // set the db file permissions to rw only for the owner
90
+
91
+ try {
92
+ fs.chmodSync(dbFileInDbFolder, 0o600)
93
+ } catch (err) {
94
+ throw new Error('Error setting permissions on ' + dbFileInDbFolder + ', error: ' + err)
95
+ }
96
+
97
+ logger.debug('Downloaded GeoIP database to: ' + dbFileInDbFolder)
98
+
99
+ }
100
+
101
+ const downloadRemoteHash = async (remoteHashUrl: string, abortSignal: AbortSignal): Promise<string> => {
102
+ // download the hash of the latest GeoIP database using fetch as text and trim it
103
+ let response: Response
104
+
105
+ try {
106
+ logger.debug('Downloading GeoIP database hash from: ' + remoteHashUrl)
107
+ response = await fetch(remoteHashUrl, { signal: abortSignal })
108
+ } catch (e) {
109
+ // Catching and re-throwing as async exception
110
+ // here is necessary, synch exceptions cannot be caught by the caller
111
+ throw new Error('Fetch error when downloading ' + remoteHashUrl + ', error: ' + e)
112
+ }
113
+
114
+ if (!response.ok) {
115
+ throw new Error('HTTP error when downloading ' + remoteHashUrl + ', status: ' + response.status)
116
+ }
117
+
118
+ return (await response.text()).trim()
119
+ }
120
+
121
+ const isDbFileValid = async (dbFile: string, remoteHash: string): Promise<boolean> => {
122
+ // check if the local db exists and calculate its hash
123
+
124
+ try {
125
+ const db = fs.readFileSync(dbFile)
126
+ const localHash = crypto.createHash('sha384').update(db).digest('hex')
127
+
128
+ // if the hashes are different, download the latest database
129
+ if (localHash !== remoteHash) {
130
+ return false
131
+ } else {
132
+ return true
133
+ }
134
+ } catch (e) {
135
+ // if the local db does not exist, or some other exception occurres db is not considered valid
136
+ return false
137
+ }
138
+ }
139
+
140
+ // returns a Reader if a new db was downloaded, or if the caller wants to force return a reader
141
+ // also if there was no need to download a new db
142
+
143
+ export const downloadGeoIpDatabase = async (
144
+ dbFolder: string,
145
+ forceReturnReader: boolean,
146
+ abortSignal: AbortSignal,
147
+ mirrorUrl?: string
148
+ ): Promise<Reader<CityResponse> | undefined> => {
149
+ // This will throw if the download folder is not readable
150
+ if (!fs.existsSync(dbFolder)) {
151
+ // This will throw if the download folder is not writable
152
+ fs.mkdirSync(dbFolder, { recursive: true })
153
+ }
154
+ if (!dbFolder.endsWith('/')) {
155
+ dbFolder += '/'
156
+ }
157
+ let geoIpMirrorUrl = GEOIP_MIRROR_URL
158
+ if (mirrorUrl !== undefined) {
159
+ if (!mirrorUrl.endsWith('/')) {
160
+ mirrorUrl += '/'
161
+ }
162
+ geoIpMirrorUrl = mirrorUrl
163
+ }
164
+ const remoteHashUrl = geoIpMirrorUrl + DB_NAME + HASH_SUFFIX
165
+ const dbDownloadUrl = geoIpMirrorUrl + DB_NAME + TAR_SUFFFIX
166
+ const dbFileInDbFolder = dbFolder + DB_NAME + DB_SUFFIX
167
+
168
+ const remoteHash = await downloadRemoteHash(remoteHashUrl, abortSignal)
169
+ const dbValid = await isDbFileValid(dbFileInDbFolder, remoteHash)
170
+ if (dbValid === false) {
171
+ await downloadNewDb(dbDownloadUrl, dbFolder, remoteHash, abortSignal)
172
+ // return new reader if db was downloaded
173
+ return new Reader<CityResponse>(fs.readFileSync(dbFileInDbFolder))
174
+ } else {
175
+ logger.debug('The hash of the local GeoIP database matches the remote hash, no need to download a new database')
176
+ }
177
+ if (forceReturnReader) {
178
+ // return reader also for old db the caller wants it
179
+ return new Reader<CityResponse>(fs.readFileSync(dbFileInDbFolder))
180
+ } else {
181
+ // return undefined if the db is already up to date
182
+ return undefined
183
+ }
184
+ }
185
+
package/src/exports.ts ADDED
@@ -0,0 +1 @@
1
+ export { GeoIpLocator } from './GeoIpLocator'
@@ -0,0 +1,35 @@
1
+ import { Readable, pipeline } from 'stream'
2
+ import tar from 'tar'
3
+ import { ReadableStream } from 'stream/web'
4
+ import NodePath from 'path' // use NodePath to avoid conflict with other 'path' symbols
5
+ import fs from 'fs'
6
+
7
+ const doExtractFileFromTarStream = (fileName: string, stream: ReadableStream<any>, downloadFolder: string): Promise<void> => {
8
+ return new Promise((resolve, reject) => {
9
+ try {
10
+ const nodeStream = Readable.fromWeb(stream)
11
+ pipeline(nodeStream,
12
+ tar.x({
13
+ cwd: downloadFolder,
14
+ filter: (entryPath: string): boolean => NodePath.basename(entryPath) === fileName,
15
+ strip: 1
16
+ }), (err) => {
17
+ if (err) {
18
+ reject(new Error('Error extracting tarball to ' + downloadFolder + ', error: ' + err))
19
+ } else {
20
+ resolve()
21
+ }
22
+ })
23
+ } catch (e) {
24
+ reject(new Error('Failed to create nodejs Readable from web stream: ' + e))
25
+ }
26
+ })
27
+ }
28
+
29
+ export const extractFileFromTarStream = async (fileName: string, stream: ReadableStream<any>, downloadFolder: string): Promise<void> => {
30
+ await doExtractFileFromTarStream(fileName, stream, downloadFolder)
31
+ if (!fs.existsSync(NodePath.join(downloadFolder, fileName))) {
32
+ throw new Error('File not found in tarball: ' + fileName)
33
+ }
34
+ }
35
+
@@ -0,0 +1,176 @@
1
+ import express from 'express'
2
+ import http from 'http'
3
+ import { Logger, wait } from '@streamr/utils'
4
+ import { fetchFileToMemory } from './fetchFileToMemory'
5
+ import fs from 'fs'
6
+ import { v4 } from 'uuid'
7
+ import EventEmitter from 'eventemitter3'
8
+ import { Duplex, pipeline } from 'stream'
9
+
10
const logger = new Logger(module)

// express does not export its app/server types directly, so derive them
type ExpressType = ReturnType<typeof express>
type ServerType = ReturnType<ExpressType['listen']>

// Upstream redistribution of the MaxMind GeoLite2 City database and its hash
const dbUrl = 'https://raw.githubusercontent.com/GitSquared/node-geolite2-redist/master/redist/GeoLite2-City.tar.gz'
const hashUrl = 'https://raw.githubusercontent.com/GitSquared/node-geolite2-redist/master/redist/GeoLite2-City.mmdb.sha384'

// Paths under which the test server serves the two files
const dbFileName = '/GeoLite2-City.tar.gz'
const hashFileName = '/GeoLite2-City.mmdb.sha384'

// On-disk cache so repeated test runs do not re-download the database
const CACHE_PATH = '/tmp/geoip-location-test-cache'

export interface TestServerEvents {
    // emitted once the underlying http server has closed
    closed: () => void
}
26
+
27
+ function bufferToStream(buf: Buffer) {
28
+ const tmp = new Duplex()
29
+ tmp.push(buf)
30
+ tmp.push(null)
31
+ return tmp
32
+ }
33
+
34
+ export class TestServer extends EventEmitter<TestServerEvents> {
35
+ private server?: ServerType
36
+ private abortController?: AbortController
37
+
38
+ private static hashData?: Uint8Array
39
+ private static dbData?: Uint8Array
40
+
41
+ private static async prefetchData(): Promise<void> {
42
+
43
+ TestServer.hashData = await fetchFileToMemory(hashUrl)
44
+
45
+ // check if db data is already prefetched to CACHE_PATH
46
+
47
+ if (fs.existsSync(CACHE_PATH + hashFileName) && fs.existsSync(CACHE_PATH + dbFileName)) {
48
+ // read hash data from CACHE_PATH
49
+ const cachedHash = fs.readFileSync(CACHE_PATH + hashFileName)
50
+
51
+ if (cachedHash.equals(TestServer.hashData)) {
52
+ TestServer.dbData = fs.readFileSync(CACHE_PATH + dbFileName)
53
+ return
54
+ }
55
+ }
56
+
57
+ // eslint-disable-next-line require-atomic-updates
58
+ TestServer.dbData = await fetchFileToMemory(dbUrl)
59
+
60
+ // save db and hash data to CACHE_PATH
61
+ try {
62
+ fs.mkdirSync(CACHE_PATH, { recursive: true })
63
+ } catch (e) {
64
+ // ignore error when creating the cache folder
65
+ }
66
+ // ensure there is never an incomplete file in the fs
67
+ const uniqueName = v4()
68
+
69
+ fs.writeFileSync(CACHE_PATH + hashFileName + uniqueName, TestServer.hashData)
70
+ fs.renameSync(CACHE_PATH + hashFileName + uniqueName, CACHE_PATH + hashFileName)
71
+
72
+ fs.writeFileSync(CACHE_PATH + dbFileName + uniqueName, TestServer.dbData)
73
+ fs.renameSync(CACHE_PATH + dbFileName + uniqueName, CACHE_PATH + dbFileName)
74
+ }
75
+
76
+ private async writeDataKilobytesPerSecond(res: http.ServerResponse, data: Uint8Array, kilobytesPerSecond?: number): Promise<void> {
77
+ let delayMilliseconds = 1
78
+
79
+ if (kilobytesPerSecond) {
80
+ delayMilliseconds = 1000 / kilobytesPerSecond
81
+ }
82
+ const chuckSize = 1024
83
+ for (let i = 0; i < data.length && !this.abortController?.signal.aborted; i += chuckSize) {
84
+ let end = i + chuckSize
85
+ if (end > data.length) {
86
+ end = data.length
87
+ }
88
+
89
+ res.write(data.slice(i, end))
90
+
91
+ if (delayMilliseconds !== undefined) {
92
+ await wait(delayMilliseconds, this.abortController?.signal)
93
+ } else {
94
+ await wait(0, this.abortController?.signal)
95
+ }
96
+ }
97
+ }
98
+
99
+ startServer(port: number, kiloBytesPerSecond?: number): Promise<void> {
100
+ return new Promise((resolve, _reject) => {
101
+ const app = express()
102
+
103
+ app.get(dbFileName, (_req, res) => {
104
+ if (kiloBytesPerSecond !== undefined) {
105
+ res.setHeader('Content-Type', 'application/gzip')
106
+ this.writeDataKilobytesPerSecond(res, TestServer.dbData!,
107
+ kiloBytesPerSecond).then(() => {
108
+ res.end()
109
+ }).catch((_err) => {
110
+ res.end()
111
+ })
112
+ } else {
113
+ // send data without throttling from file
114
+ const readable = bufferToStream(Buffer.from(TestServer.dbData!))
115
+ pipeline(readable, res, (err) => {
116
+ if (err) {
117
+ logger.error('Error sending db file: ', { err })
118
+ }
119
+ })
120
+ }
121
+ })
122
+
123
+ app.get(hashFileName, (_req, res) => {
124
+ // always send hash data without throttling
125
+ const readable = bufferToStream(Buffer.from(TestServer.hashData!))
126
+ pipeline(readable, res, (err) => {
127
+ if (err) {
128
+ logger.error('Error sending hash file: ', { err })
129
+ }
130
+ })
131
+ })
132
+
133
+ this.server = app.listen(port, '127.0.0.1', () => {
134
+ logger.info('Test server is running on port ' + port)
135
+
136
+ // The server is not really ready after listen callback, possible bug in express
137
+ setTimeout(() => {
138
+ resolve()
139
+ }, 1000)
140
+ })
141
+ })
142
+ }
143
+
144
+ async start(port: number, kiloBytesPerSecond?: number): Promise<void> {
145
+ if (!TestServer.hashData || !TestServer.dbData) {
146
+ await TestServer.prefetchData()
147
+ }
148
+
149
+ if (this.server) {
150
+ throw new Error('Test server already running')
151
+ }
152
+
153
+ this.abortController = new AbortController()
154
+ await this.startServer(port, kiloBytesPerSecond)
155
+ }
156
+
157
+ stop(): Promise<void> {
158
+ return new Promise((resolve, _reject) => {
159
+ if (this.server) {
160
+ this.abortController!.abort()
161
+
162
+ this.server.close((err) => {
163
+ if (err) {
164
+ logger.warn('Error closing server: ', { err })
165
+ }
166
+ this.server = undefined
167
+ this.emit('closed')
168
+ resolve()
169
+ })
170
+ this.server.closeAllConnections()
171
+ } else {
172
+ resolve()
173
+ }
174
+ })
175
+ }
176
+ }
@@ -0,0 +1,7 @@
1
+ export const fetchFileToMemory = async (url: string): Promise<Uint8Array> => {
2
+ const response = await fetch(url)
3
+ if (!response.ok) {
4
+ throw new Error('HTTP error when downloading ' + url + ', status: ' + response.status)
5
+ }
6
+ return new Uint8Array(await response.arrayBuffer())
7
+ }
@@ -0,0 +1,107 @@
1
+ import { GeoIpLocator } from '../../src/GeoIpLocator'
2
+ import { wait, waitForCondition } from '@streamr/utils'
3
+ import fs from 'fs'
4
+ import { TestServer } from '../helpers/TestServer'
5
+
6
+ describe('GeoIpLocator', () => {
7
+
8
+ const serverPort = 31991
9
+ const mirrorUrl = 'http://127.0.0.1:' + serverPort + '/'
10
+
11
+ const DB_FILENAME = 'GeoLite2-City.mmdb'
12
+ let dirCounter = 0
13
+ const dbPath = '/tmp'
14
+ let dbDir: string
15
+ let locator: GeoIpLocator
16
+ let testServer: TestServer
17
+
18
+ const getDbDir = () => {
19
+ dirCounter++
20
+ return dbPath + '/geolitelocator2-intervals' + dirCounter
21
+ }
22
+
23
+ beforeAll(async () => {
24
+ testServer = new TestServer()
25
+ await testServer.start(serverPort)
26
+ }, 120000)
27
+
28
+ afterEach(async () => {
29
+ locator!.stop()
30
+ testServer!.stop()
31
+ fs.rmSync(dbDir!, { recursive: true })
32
+ })
33
+
34
+ it('schedules a new check with a diffrent interval if monthly database check fails', async () => {
35
+
36
+ dbDir = getDbDir()
37
+
38
+ try {
39
+ fs.unlinkSync(dbDir + '/' + DB_FILENAME)
40
+ } catch (e) {
41
+ // ignore
42
+ }
43
+
44
+ locator = new GeoIpLocator(dbDir, 3000, 1000, mirrorUrl)
45
+
46
+ // start locator normally
47
+ await locator.start()
48
+
49
+ // delete the db
50
+ try {
51
+ fs.unlinkSync(dbDir + '/' + DB_FILENAME)
52
+ } catch (e) {
53
+ // ignore
54
+ }
55
+
56
+ // mock fetch to fail
57
+ const fetchMock = jest
58
+ .spyOn(globalThis, 'fetch')
59
+ .mockImplementation(async () => {
60
+ throw new Error('API is down')
61
+ })
62
+
63
+ // wait for the first check to happen
64
+ await wait(3500)
65
+
66
+ // normal check interval should have been run
67
+ // after 3000ms, this should have tried
68
+ // downloading the hash, but failed
69
+
70
+ expect(fetchMock).toHaveBeenCalledTimes(1)
71
+
72
+ // wait for the failure interval to happen
73
+ await wait(1200)
74
+
75
+ // failure interval should have been run after 1500ms from the failure
76
+ // it should have tried downloading the hash again and
77
+ // failed
78
+
79
+ expect(fetchMock).toHaveBeenCalledTimes(2)
80
+
81
+ // restore fetch
82
+ fetchMock.mockRestore()
83
+
84
+ // mock fetch again to just count the calls
85
+ const fetchMock2 = jest
86
+ .spyOn(globalThis, 'fetch')
87
+
88
+ // wait for failure interval to happen
89
+ await wait(1200)
90
+
91
+ // failure interval should have downloaded
92
+ // both the hash and the db
93
+
94
+ expect(fetchMock2).toHaveBeenCalledTimes(2)
95
+
96
+ // expect the db to be there
97
+ await waitForCondition(() => fs.existsSync(dbDir + '/' + DB_FILENAME), 10000)
98
+
99
+ // suomi.fi
100
+ const location = locator.lookup('62.241.198.245')
101
+ expect(location).toBeDefined()
102
+
103
+ // Helsinki, Finland
104
+ expect(location!.latitude).toBe(60.1797)
105
+ expect(location!.longitude).toBe(24.9344)
106
+ }, 60000)
107
+ })