@streamr/geoip-location 103.0.0-rc.0 → 103.0.0-rc.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +8 -2
- package/jest.config.ts +0 -1
- package/src/GeoIpLocator.ts +0 -109
- package/src/downloadGeoIpDatabase.ts +0 -185
- package/src/exports.ts +0 -1
- package/src/tarHelper.ts +0 -35
- package/test/helpers/TestServer.ts +0 -176
- package/test/helpers/fetchFileToMemory.ts +0 -7
- package/test/unit/GeoIpLocator-intervals.test.ts +0 -107
- package/test/unit/GeoIpLocator-no-network-at-monthly.test.ts +0 -55
- package/test/unit/GeoIpLocator-no-network-at-start.test.ts +0 -29
- package/test/unit/GeoIpLocator.test.ts +0 -127
- package/test/unit/downloadGeoIpDatabase.test.ts +0 -66
- package/test/unit/tarHelper.test.ts +0 -92
- package/tsconfig.browser.json +0 -12
- package/tsconfig.jest.json +0 -10
- package/tsconfig.json +0 -3
- package/tsconfig.node.json +0 -13
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@streamr/geoip-location",
|
|
3
|
-
"version": "103.0.0-rc.0",
|
|
3
|
+
"version": "103.0.0-rc.1",
|
|
4
4
|
"description": "Library for getting location information from IP addresses based on MaxMind GeoLite2 databases",
|
|
5
5
|
"repository": {
|
|
6
6
|
"type": "git",
|
|
@@ -14,6 +14,12 @@
|
|
|
14
14
|
"dist/src/downloadGeoIpDatabase.js": false
|
|
15
15
|
},
|
|
16
16
|
"types": "dist/src/exports.d.ts",
|
|
17
|
+
"files": [
|
|
18
|
+
"dist",
|
|
19
|
+
"!*.tsbuildinfo",
|
|
20
|
+
"README.md",
|
|
21
|
+
"LICENSE"
|
|
22
|
+
],
|
|
17
23
|
"license": "Apache-2.0",
|
|
18
24
|
"author": "Streamr Network AG <contact@streamr.network>",
|
|
19
25
|
"scripts": {
|
|
@@ -25,7 +31,7 @@
|
|
|
25
31
|
"test-unit": "jest test/unit"
|
|
26
32
|
},
|
|
27
33
|
"dependencies": {
|
|
28
|
-
"@streamr/utils": "103.0.0-rc.0",
|
|
34
|
+
"@streamr/utils": "103.0.0-rc.1",
|
|
29
35
|
"eventemitter3": "^5.0.0",
|
|
30
36
|
"long-timeout": "^0.1.1",
|
|
31
37
|
"mmdb-lib": "^2.1.1",
|
package/jest.config.ts
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
export { default } from '../../jest.config'
|
package/src/GeoIpLocator.ts
DELETED
|
@@ -1,109 +0,0 @@
|
|
|
1
|
-
import { Logger, filePathToNodeFormat } from '@streamr/utils'
|
|
2
|
-
import { CityResponse, Reader } from 'mmdb-lib'
|
|
3
|
-
import LongTimeout from 'long-timeout'
|
|
4
|
-
import { downloadGeoIpDatabase } from './downloadGeoIpDatabase'
|
|
5
|
-
|
|
6
|
-
const logger = new Logger(module)
|
|
7
|
-
|
|
8
|
-
interface GeoIpLookupResult {
|
|
9
|
-
latitude: number
|
|
10
|
-
longitude: number
|
|
11
|
-
}
|
|
12
|
-
|
|
13
|
-
// 30 days in milliseconds
|
|
14
|
-
const DEFAULT_DB_CHECK_INTERVAL = 30 * 24 * 60 * 60 * 1000
|
|
15
|
-
// 24 hours in milliseconds
|
|
16
|
-
const DEFAULT_DB_CHECK_ERROR_INTERVAL = 24 * 60 * 60 * 1000
|
|
17
|
-
|
|
18
|
-
export class GeoIpLocator {
|
|
19
|
-
private abortController: AbortController
|
|
20
|
-
private readonly geoIpDatabaseFolder: string
|
|
21
|
-
private readonly dbCheckInterval: number
|
|
22
|
-
private readonly dbCheckErrorInterval: number
|
|
23
|
-
private readonly mirrorUrl?: string
|
|
24
|
-
private reader?: Reader<CityResponse>
|
|
25
|
-
private dbCheckTimeout?: LongTimeout.Timeout
|
|
26
|
-
|
|
27
|
-
constructor(
|
|
28
|
-
geoIpDatabaseFolder: string,
|
|
29
|
-
dbCheckInterval = DEFAULT_DB_CHECK_INTERVAL,
|
|
30
|
-
dbCheckErrorInterval = DEFAULT_DB_CHECK_ERROR_INTERVAL,
|
|
31
|
-
mirrorUrl?: string
|
|
32
|
-
) {
|
|
33
|
-
this.abortController = new AbortController()
|
|
34
|
-
this.dbCheckInterval = dbCheckInterval
|
|
35
|
-
this.dbCheckErrorInterval = dbCheckErrorInterval
|
|
36
|
-
if (!geoIpDatabaseFolder.endsWith('/')) {
|
|
37
|
-
geoIpDatabaseFolder += '/'
|
|
38
|
-
}
|
|
39
|
-
this.geoIpDatabaseFolder = filePathToNodeFormat(geoIpDatabaseFolder)
|
|
40
|
-
this.mirrorUrl = mirrorUrl
|
|
41
|
-
}
|
|
42
|
-
|
|
43
|
-
private checkDatabase: () => Promise<void> = async () => {
|
|
44
|
-
if (this.reader === undefined) {
|
|
45
|
-
// if we do not have a reader, create a new one in any case
|
|
46
|
-
this.reader = await downloadGeoIpDatabase(this.geoIpDatabaseFolder, true, this.abortController.signal, this.mirrorUrl)
|
|
47
|
-
} else {
|
|
48
|
-
// if we already have a reader, create a new one only if db has changed
|
|
49
|
-
const newReader = await downloadGeoIpDatabase(this.geoIpDatabaseFolder, false, this.abortController.signal, this.mirrorUrl)
|
|
50
|
-
if (newReader !== undefined) {
|
|
51
|
-
this.reader = newReader
|
|
52
|
-
}
|
|
53
|
-
}
|
|
54
|
-
}
|
|
55
|
-
|
|
56
|
-
private scheduleCheck: (timeout: number) => void = async (timeout: number) => {
|
|
57
|
-
if (this.abortController.signal.aborted) {
|
|
58
|
-
return
|
|
59
|
-
}
|
|
60
|
-
this.dbCheckTimeout = LongTimeout.setTimeout(async () => {
|
|
61
|
-
try {
|
|
62
|
-
await this.checkDatabase()
|
|
63
|
-
this.scheduleCheck(this.dbCheckInterval)
|
|
64
|
-
} catch (err) {
|
|
65
|
-
logger.warn('GeoIpLocator: GeoIP database check failed', { err })
|
|
66
|
-
this.scheduleCheck(this.dbCheckErrorInterval)
|
|
67
|
-
}
|
|
68
|
-
}, timeout)
|
|
69
|
-
}
|
|
70
|
-
|
|
71
|
-
async start(): Promise<void> {
|
|
72
|
-
if (this.dbCheckTimeout !== undefined) {
|
|
73
|
-
return
|
|
74
|
-
}
|
|
75
|
-
await this.checkDatabase()
|
|
76
|
-
this.scheduleCheck(this.dbCheckInterval)
|
|
77
|
-
}
|
|
78
|
-
|
|
79
|
-
stop(): void {
|
|
80
|
-
if (this.dbCheckTimeout !== undefined) {
|
|
81
|
-
LongTimeout.clearTimeout(this.dbCheckTimeout)
|
|
82
|
-
}
|
|
83
|
-
this.abortController.abort()
|
|
84
|
-
}
|
|
85
|
-
|
|
86
|
-
lookup(ip: string): GeoIpLookupResult | undefined {
|
|
87
|
-
if (this.reader === undefined) {
|
|
88
|
-
logger.warn('GeoIpLocator: lookup called before database is ready (maybe start() was not called?')
|
|
89
|
-
return undefined
|
|
90
|
-
}
|
|
91
|
-
|
|
92
|
-
// If ip is falsy, the library will crash
|
|
93
|
-
// this might happen despite the ts typings because the ip address
|
|
94
|
-
// comes from the ws server socket and is not under our control
|
|
95
|
-
if (!ip) {
|
|
96
|
-
return undefined
|
|
97
|
-
}
|
|
98
|
-
|
|
99
|
-
const result = this.reader.get(ip)
|
|
100
|
-
if (!result?.location?.latitude || !result.location.longitude) {
|
|
101
|
-
return undefined
|
|
102
|
-
} else {
|
|
103
|
-
return {
|
|
104
|
-
latitude: result.location.latitude,
|
|
105
|
-
longitude: result.location.longitude
|
|
106
|
-
}
|
|
107
|
-
}
|
|
108
|
-
}
|
|
109
|
-
}
|
|
@@ -1,185 +0,0 @@
|
|
|
1
|
-
import crypto from 'crypto'
|
|
2
|
-
import fs from 'fs'
|
|
3
|
-
import { CityResponse, Reader } from 'mmdb-lib'
|
|
4
|
-
import { extractFileFromTarStream } from './tarHelper'
|
|
5
|
-
import { v4 } from 'uuid'
|
|
6
|
-
import { Logger } from '@streamr/utils'
|
|
7
|
-
|
|
8
|
-
const GEOIP_MIRROR_URL = 'https://raw.githubusercontent.com/GitSquared/node-geolite2-redist/master/redist/'
|
|
9
|
-
const DB_NAME = 'GeoLite2-City'
|
|
10
|
-
const TAR_SUFFFIX = '.tar.gz'
|
|
11
|
-
const DB_SUFFIX = '.mmdb'
|
|
12
|
-
const HASH_SUFFIX = '.mmdb.sha384'
|
|
13
|
-
|
|
14
|
-
const logger = new Logger(module)
|
|
15
|
-
|
|
16
|
-
const downloadNewDb = async (
|
|
17
|
-
url: string,
|
|
18
|
-
dbFolder: string,
|
|
19
|
-
remoteHash: string,
|
|
20
|
-
abortSignal: AbortSignal
|
|
21
|
-
): Promise<void> => {
|
|
22
|
-
// make a unique name for the temporary download folder
|
|
23
|
-
// in case there are multiple downloads happening at the same time
|
|
24
|
-
|
|
25
|
-
const uniqueName = v4()
|
|
26
|
-
const downloadFolder = dbFolder + '.download' + uniqueName
|
|
27
|
-
const dbFileName = DB_NAME + DB_SUFFIX
|
|
28
|
-
const dbFileInDownloadFolder = downloadFolder + '/' + dbFileName
|
|
29
|
-
const dbFileInDbFolder = dbFolder + dbFileName
|
|
30
|
-
|
|
31
|
-
let response: Response
|
|
32
|
-
|
|
33
|
-
try {
|
|
34
|
-
logger.debug('Downloading GeoIP database from: ' + url)
|
|
35
|
-
response = await fetch(url, { keepalive: false, signal: abortSignal })
|
|
36
|
-
} catch (e) {
|
|
37
|
-
// Catching and re-throwing as async exception
|
|
38
|
-
// here is necessary, synch exceptions cannot be caught by the caller
|
|
39
|
-
throw new Error('Fetch error when downloading ' + url + ', error: ' + e)
|
|
40
|
-
}
|
|
41
|
-
|
|
42
|
-
if (!response.ok) {
|
|
43
|
-
throw new Error('HTTP error when downloading ' + url + ', status: ' + response.status)
|
|
44
|
-
}
|
|
45
|
-
|
|
46
|
-
// extract the tarball to a temporary folder
|
|
47
|
-
|
|
48
|
-
try {
|
|
49
|
-
fs.mkdirSync(downloadFolder, { recursive: true })
|
|
50
|
-
} catch (e) {
|
|
51
|
-
throw new Error('Error creating temporary folder ' + downloadFolder + ', error: ' + e)
|
|
52
|
-
}
|
|
53
|
-
|
|
54
|
-
try {
|
|
55
|
-
await extractFileFromTarStream(dbFileName, response.body!, downloadFolder)
|
|
56
|
-
} catch (e) {
|
|
57
|
-
try {
|
|
58
|
-
fs.rmSync(downloadFolder, { recursive: true })
|
|
59
|
-
} catch {
|
|
60
|
-
// ignore error when removing the temporary folder
|
|
61
|
-
}
|
|
62
|
-
throw e
|
|
63
|
-
}
|
|
64
|
-
|
|
65
|
-
// check the hash of the extracted file
|
|
66
|
-
|
|
67
|
-
if (!isDbFileValid(dbFileInDownloadFolder, remoteHash)) {
|
|
68
|
-
try {
|
|
69
|
-
fs.rmSync(downloadFolder, { recursive: true })
|
|
70
|
-
} catch {
|
|
71
|
-
// ignore error when removing the temporary folder
|
|
72
|
-
}
|
|
73
|
-
throw new Error('Downloaded database hash does not match the expected hash')
|
|
74
|
-
}
|
|
75
|
-
|
|
76
|
-
try {
|
|
77
|
-
// move the extracted file to the correct location
|
|
78
|
-
fs.renameSync(dbFileInDownloadFolder, dbFileInDbFolder)
|
|
79
|
-
} catch (e) {
|
|
80
|
-
throw new Error('Error moving ' + dbFileInDownloadFolder + ' to ' + dbFileInDbFolder + ', error: ' + e)
|
|
81
|
-
} finally {
|
|
82
|
-
try {
|
|
83
|
-
fs.rmSync(downloadFolder, { recursive: true })
|
|
84
|
-
} catch {
|
|
85
|
-
// ignore error when removing the temporary folder
|
|
86
|
-
}
|
|
87
|
-
}
|
|
88
|
-
|
|
89
|
-
// set the db file permissions to rw only for the owner
|
|
90
|
-
|
|
91
|
-
try {
|
|
92
|
-
fs.chmodSync(dbFileInDbFolder, 0o600)
|
|
93
|
-
} catch (err) {
|
|
94
|
-
throw new Error('Error setting permissions on ' + dbFileInDbFolder + ', error: ' + err)
|
|
95
|
-
}
|
|
96
|
-
|
|
97
|
-
logger.debug('Downloaded GeoIP database to: ' + dbFileInDbFolder)
|
|
98
|
-
|
|
99
|
-
}
|
|
100
|
-
|
|
101
|
-
const downloadRemoteHash = async (remoteHashUrl: string, abortSignal: AbortSignal): Promise<string> => {
|
|
102
|
-
// download the hash of the latest GeoIP database using fetch as text and trim it
|
|
103
|
-
let response: Response
|
|
104
|
-
|
|
105
|
-
try {
|
|
106
|
-
logger.debug('Downloading GeoIP database hash from: ' + remoteHashUrl)
|
|
107
|
-
response = await fetch(remoteHashUrl, { signal: abortSignal })
|
|
108
|
-
} catch (e) {
|
|
109
|
-
// Catching and re-throwing as async exception
|
|
110
|
-
// here is necessary, synch exceptions cannot be caught by the caller
|
|
111
|
-
throw new Error('Fetch error when downloading ' + remoteHashUrl + ', error: ' + e)
|
|
112
|
-
}
|
|
113
|
-
|
|
114
|
-
if (!response.ok) {
|
|
115
|
-
throw new Error('HTTP error when downloading ' + remoteHashUrl + ', status: ' + response.status)
|
|
116
|
-
}
|
|
117
|
-
|
|
118
|
-
return (await response.text()).trim()
|
|
119
|
-
}
|
|
120
|
-
|
|
121
|
-
const isDbFileValid = (dbFile: string, remoteHash: string): boolean => {
|
|
122
|
-
// check if the local db exists and calculate its hash
|
|
123
|
-
|
|
124
|
-
try {
|
|
125
|
-
const db = fs.readFileSync(dbFile)
|
|
126
|
-
const localHash = crypto.createHash('sha384').update(db).digest('hex')
|
|
127
|
-
|
|
128
|
-
// if the hashes are different, download the latest database
|
|
129
|
-
if (localHash !== remoteHash) {
|
|
130
|
-
return false
|
|
131
|
-
} else {
|
|
132
|
-
return true
|
|
133
|
-
}
|
|
134
|
-
} catch {
|
|
135
|
-
// if the local db does not exist, or some other exception occurres db is not considered valid
|
|
136
|
-
return false
|
|
137
|
-
}
|
|
138
|
-
}
|
|
139
|
-
|
|
140
|
-
// returns a Reader if a new db was downloaded, or if the caller wants to force return a reader
|
|
141
|
-
// also if there was no need to download a new db
|
|
142
|
-
|
|
143
|
-
export const downloadGeoIpDatabase = async (
|
|
144
|
-
dbFolder: string,
|
|
145
|
-
forceReturnReader: boolean,
|
|
146
|
-
abortSignal: AbortSignal,
|
|
147
|
-
mirrorUrl?: string
|
|
148
|
-
): Promise<Reader<CityResponse> | undefined> => {
|
|
149
|
-
// This will throw if the download folder is not readable
|
|
150
|
-
if (!fs.existsSync(dbFolder)) {
|
|
151
|
-
// This will throw if the download folder is not writable
|
|
152
|
-
fs.mkdirSync(dbFolder, { recursive: true })
|
|
153
|
-
}
|
|
154
|
-
if (!dbFolder.endsWith('/')) {
|
|
155
|
-
dbFolder += '/'
|
|
156
|
-
}
|
|
157
|
-
let geoIpMirrorUrl = GEOIP_MIRROR_URL
|
|
158
|
-
if (mirrorUrl !== undefined) {
|
|
159
|
-
if (!mirrorUrl.endsWith('/')) {
|
|
160
|
-
mirrorUrl += '/'
|
|
161
|
-
}
|
|
162
|
-
geoIpMirrorUrl = mirrorUrl
|
|
163
|
-
}
|
|
164
|
-
const remoteHashUrl = geoIpMirrorUrl + DB_NAME + HASH_SUFFIX
|
|
165
|
-
const dbDownloadUrl = geoIpMirrorUrl + DB_NAME + TAR_SUFFFIX
|
|
166
|
-
const dbFileInDbFolder = dbFolder + DB_NAME + DB_SUFFIX
|
|
167
|
-
|
|
168
|
-
const remoteHash = await downloadRemoteHash(remoteHashUrl, abortSignal)
|
|
169
|
-
const dbValid = isDbFileValid(dbFileInDbFolder, remoteHash)
|
|
170
|
-
if (dbValid === false) {
|
|
171
|
-
await downloadNewDb(dbDownloadUrl, dbFolder, remoteHash, abortSignal)
|
|
172
|
-
// return new reader if db was downloaded
|
|
173
|
-
return new Reader<CityResponse>(fs.readFileSync(dbFileInDbFolder))
|
|
174
|
-
} else {
|
|
175
|
-
logger.debug('The hash of the local GeoIP database matches the remote hash, no need to download a new database')
|
|
176
|
-
}
|
|
177
|
-
if (forceReturnReader) {
|
|
178
|
-
// return reader also for old db the caller wants it
|
|
179
|
-
return new Reader<CityResponse>(fs.readFileSync(dbFileInDbFolder))
|
|
180
|
-
} else {
|
|
181
|
-
// return undefined if the db is already up to date
|
|
182
|
-
return undefined
|
|
183
|
-
}
|
|
184
|
-
}
|
|
185
|
-
|
package/src/exports.ts
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
export { GeoIpLocator } from './GeoIpLocator'
|
package/src/tarHelper.ts
DELETED
|
@@ -1,35 +0,0 @@
|
|
|
1
|
-
import { Readable, pipeline } from 'stream'
|
|
2
|
-
import { extract } from 'tar'
|
|
3
|
-
import { ReadableStream } from 'stream/web'
|
|
4
|
-
import NodePath from 'path' // use NodePath to avoid conflict with other 'path' symbols
|
|
5
|
-
import fs from 'fs'
|
|
6
|
-
|
|
7
|
-
const doExtractFileFromTarStream = (fileName: string, stream: ReadableStream<any>, downloadFolder: string): Promise<void> => {
|
|
8
|
-
return new Promise((resolve, reject) => {
|
|
9
|
-
try {
|
|
10
|
-
const nodeStream = Readable.fromWeb(stream)
|
|
11
|
-
pipeline(nodeStream,
|
|
12
|
-
extract({
|
|
13
|
-
cwd: downloadFolder,
|
|
14
|
-
filter: (entryPath: string): boolean => NodePath.basename(entryPath) === fileName,
|
|
15
|
-
strip: 1
|
|
16
|
-
}), (err) => {
|
|
17
|
-
if (err) {
|
|
18
|
-
reject(new Error('Error extracting tarball to ' + downloadFolder + ', error: ' + err))
|
|
19
|
-
} else {
|
|
20
|
-
resolve()
|
|
21
|
-
}
|
|
22
|
-
})
|
|
23
|
-
} catch (e) {
|
|
24
|
-
reject(new Error('Failed to create nodejs Readable from web stream: ' + e))
|
|
25
|
-
}
|
|
26
|
-
})
|
|
27
|
-
}
|
|
28
|
-
|
|
29
|
-
export const extractFileFromTarStream = async (fileName: string, stream: ReadableStream<any>, downloadFolder: string): Promise<void> => {
|
|
30
|
-
await doExtractFileFromTarStream(fileName, stream, downloadFolder)
|
|
31
|
-
if (!fs.existsSync(NodePath.join(downloadFolder, fileName))) {
|
|
32
|
-
throw new Error('File not found in tarball: ' + fileName)
|
|
33
|
-
}
|
|
34
|
-
}
|
|
35
|
-
|
|
@@ -1,176 +0,0 @@
|
|
|
1
|
-
import express from 'express'
|
|
2
|
-
import http from 'http'
|
|
3
|
-
import { Logger, wait } from '@streamr/utils'
|
|
4
|
-
import { fetchFileToMemory } from './fetchFileToMemory'
|
|
5
|
-
import fs from 'fs'
|
|
6
|
-
import { v4 } from 'uuid'
|
|
7
|
-
import EventEmitter from 'eventemitter3'
|
|
8
|
-
import { Duplex, pipeline } from 'stream'
|
|
9
|
-
|
|
10
|
-
const logger = new Logger(module)
|
|
11
|
-
|
|
12
|
-
type ExpressType = ReturnType<typeof express>
|
|
13
|
-
type ServerType = ReturnType<ExpressType['listen']>
|
|
14
|
-
|
|
15
|
-
const dbUrl = 'https://raw.githubusercontent.com/GitSquared/node-geolite2-redist/master/redist/GeoLite2-City.tar.gz'
|
|
16
|
-
const hashUrl = 'https://raw.githubusercontent.com/GitSquared/node-geolite2-redist/master/redist/GeoLite2-City.mmdb.sha384'
|
|
17
|
-
|
|
18
|
-
const dbFileName = '/GeoLite2-City.tar.gz'
|
|
19
|
-
const hashFileName = '/GeoLite2-City.mmdb.sha384'
|
|
20
|
-
|
|
21
|
-
const CACHE_PATH = '/tmp/geoip-location-test-cache'
|
|
22
|
-
|
|
23
|
-
export interface TestServerEvents {
|
|
24
|
-
closed: () => void
|
|
25
|
-
}
|
|
26
|
-
|
|
27
|
-
function bufferToStream(buf: Buffer) {
|
|
28
|
-
const tmp = new Duplex()
|
|
29
|
-
tmp.push(buf)
|
|
30
|
-
tmp.push(null)
|
|
31
|
-
return tmp
|
|
32
|
-
}
|
|
33
|
-
|
|
34
|
-
export class TestServer extends EventEmitter<TestServerEvents> {
|
|
35
|
-
private server?: ServerType
|
|
36
|
-
private abortController?: AbortController
|
|
37
|
-
|
|
38
|
-
private static hashData?: Uint8Array
|
|
39
|
-
private static dbData?: Uint8Array
|
|
40
|
-
|
|
41
|
-
private static async prefetchData(): Promise<void> {
|
|
42
|
-
|
|
43
|
-
TestServer.hashData = await fetchFileToMemory(hashUrl)
|
|
44
|
-
|
|
45
|
-
// check if db data is already prefetched to CACHE_PATH
|
|
46
|
-
|
|
47
|
-
if (fs.existsSync(CACHE_PATH + hashFileName) && fs.existsSync(CACHE_PATH + dbFileName)) {
|
|
48
|
-
// read hash data from CACHE_PATH
|
|
49
|
-
const cachedHash = fs.readFileSync(CACHE_PATH + hashFileName)
|
|
50
|
-
|
|
51
|
-
if (cachedHash.equals(TestServer.hashData)) {
|
|
52
|
-
TestServer.dbData = fs.readFileSync(CACHE_PATH + dbFileName)
|
|
53
|
-
return
|
|
54
|
-
}
|
|
55
|
-
}
|
|
56
|
-
|
|
57
|
-
// eslint-disable-next-line require-atomic-updates
|
|
58
|
-
TestServer.dbData = await fetchFileToMemory(dbUrl)
|
|
59
|
-
|
|
60
|
-
// save db and hash data to CACHE_PATH
|
|
61
|
-
try {
|
|
62
|
-
fs.mkdirSync(CACHE_PATH, { recursive: true })
|
|
63
|
-
} catch {
|
|
64
|
-
// ignore error when creating the cache folder
|
|
65
|
-
}
|
|
66
|
-
// ensure there is never an incomplete file in the fs
|
|
67
|
-
const uniqueName = v4()
|
|
68
|
-
|
|
69
|
-
fs.writeFileSync(CACHE_PATH + hashFileName + uniqueName, TestServer.hashData)
|
|
70
|
-
fs.renameSync(CACHE_PATH + hashFileName + uniqueName, CACHE_PATH + hashFileName)
|
|
71
|
-
|
|
72
|
-
fs.writeFileSync(CACHE_PATH + dbFileName + uniqueName, TestServer.dbData)
|
|
73
|
-
fs.renameSync(CACHE_PATH + dbFileName + uniqueName, CACHE_PATH + dbFileName)
|
|
74
|
-
}
|
|
75
|
-
|
|
76
|
-
private async writeDataKilobytesPerSecond(res: http.ServerResponse, data: Uint8Array, kilobytesPerSecond?: number): Promise<void> {
|
|
77
|
-
let delayMilliseconds = 1
|
|
78
|
-
|
|
79
|
-
if (kilobytesPerSecond) {
|
|
80
|
-
delayMilliseconds = 1000 / kilobytesPerSecond
|
|
81
|
-
}
|
|
82
|
-
const chuckSize = 1024
|
|
83
|
-
for (let i = 0; i < data.length && !this.abortController?.signal.aborted; i += chuckSize) {
|
|
84
|
-
let end = i + chuckSize
|
|
85
|
-
if (end > data.length) {
|
|
86
|
-
end = data.length
|
|
87
|
-
}
|
|
88
|
-
|
|
89
|
-
res.write(data.slice(i, end))
|
|
90
|
-
|
|
91
|
-
if (delayMilliseconds !== undefined) {
|
|
92
|
-
await wait(delayMilliseconds, this.abortController?.signal)
|
|
93
|
-
} else {
|
|
94
|
-
await wait(0, this.abortController?.signal)
|
|
95
|
-
}
|
|
96
|
-
}
|
|
97
|
-
}
|
|
98
|
-
|
|
99
|
-
startServer(port: number, kiloBytesPerSecond?: number): Promise<void> {
|
|
100
|
-
return new Promise((resolve, _reject) => {
|
|
101
|
-
const app = express()
|
|
102
|
-
|
|
103
|
-
app.get(dbFileName, (_req, res) => {
|
|
104
|
-
if (kiloBytesPerSecond !== undefined) {
|
|
105
|
-
res.setHeader('Content-Type', 'application/gzip')
|
|
106
|
-
this.writeDataKilobytesPerSecond(res, TestServer.dbData!,
|
|
107
|
-
kiloBytesPerSecond).then(() => {
|
|
108
|
-
res.end()
|
|
109
|
-
}).catch((_err) => {
|
|
110
|
-
res.end()
|
|
111
|
-
})
|
|
112
|
-
} else {
|
|
113
|
-
// send data without throttling from file
|
|
114
|
-
const readable = bufferToStream(Buffer.from(TestServer.dbData!))
|
|
115
|
-
pipeline(readable, res, (err) => {
|
|
116
|
-
if (err) {
|
|
117
|
-
logger.error('Error sending db file: ', { err })
|
|
118
|
-
}
|
|
119
|
-
})
|
|
120
|
-
}
|
|
121
|
-
})
|
|
122
|
-
|
|
123
|
-
app.get(hashFileName, (_req, res) => {
|
|
124
|
-
// always send hash data without throttling
|
|
125
|
-
const readable = bufferToStream(Buffer.from(TestServer.hashData!))
|
|
126
|
-
pipeline(readable, res, (err) => {
|
|
127
|
-
if (err) {
|
|
128
|
-
logger.error('Error sending hash file: ', { err })
|
|
129
|
-
}
|
|
130
|
-
})
|
|
131
|
-
})
|
|
132
|
-
|
|
133
|
-
this.server = app.listen(port, '127.0.0.1', () => {
|
|
134
|
-
logger.info('Test server is running on port ' + port)
|
|
135
|
-
|
|
136
|
-
// The server is not really ready after listen callback, possible bug in express
|
|
137
|
-
setTimeout(() => {
|
|
138
|
-
resolve()
|
|
139
|
-
}, 1000)
|
|
140
|
-
})
|
|
141
|
-
})
|
|
142
|
-
}
|
|
143
|
-
|
|
144
|
-
async start(port: number, kiloBytesPerSecond?: number): Promise<void> {
|
|
145
|
-
if (!TestServer.hashData || !TestServer.dbData) {
|
|
146
|
-
await TestServer.prefetchData()
|
|
147
|
-
}
|
|
148
|
-
|
|
149
|
-
if (this.server) {
|
|
150
|
-
throw new Error('Test server already running')
|
|
151
|
-
}
|
|
152
|
-
|
|
153
|
-
this.abortController = new AbortController()
|
|
154
|
-
await this.startServer(port, kiloBytesPerSecond)
|
|
155
|
-
}
|
|
156
|
-
|
|
157
|
-
stop(): Promise<void> {
|
|
158
|
-
return new Promise((resolve, _reject) => {
|
|
159
|
-
if (this.server) {
|
|
160
|
-
this.abortController!.abort()
|
|
161
|
-
|
|
162
|
-
this.server.close((err) => {
|
|
163
|
-
if (err) {
|
|
164
|
-
logger.warn('Error closing server: ', { err })
|
|
165
|
-
}
|
|
166
|
-
this.server = undefined
|
|
167
|
-
this.emit('closed')
|
|
168
|
-
resolve()
|
|
169
|
-
})
|
|
170
|
-
this.server.closeAllConnections()
|
|
171
|
-
} else {
|
|
172
|
-
resolve()
|
|
173
|
-
}
|
|
174
|
-
})
|
|
175
|
-
}
|
|
176
|
-
}
|
|
@@ -1,7 +0,0 @@
|
|
|
1
|
-
export const fetchFileToMemory = async (url: string): Promise<Uint8Array> => {
|
|
2
|
-
const response = await fetch(url)
|
|
3
|
-
if (!response.ok) {
|
|
4
|
-
throw new Error('HTTP error when downloading ' + url + ', status: ' + response.status)
|
|
5
|
-
}
|
|
6
|
-
return new Uint8Array(await response.arrayBuffer())
|
|
7
|
-
}
|
|
@@ -1,107 +0,0 @@
|
|
|
1
|
-
import { GeoIpLocator } from '../../src/GeoIpLocator'
|
|
2
|
-
import { wait, until } from '@streamr/utils'
|
|
3
|
-
import fs from 'fs'
|
|
4
|
-
import { TestServer } from '../helpers/TestServer'
|
|
5
|
-
|
|
6
|
-
describe('GeoIpLocator', () => {
|
|
7
|
-
|
|
8
|
-
const serverPort = 31991
|
|
9
|
-
const mirrorUrl = 'http://127.0.0.1:' + serverPort + '/'
|
|
10
|
-
|
|
11
|
-
const DB_FILENAME = 'GeoLite2-City.mmdb'
|
|
12
|
-
let dirCounter = 0
|
|
13
|
-
const dbPath = '/tmp'
|
|
14
|
-
let dbDir: string
|
|
15
|
-
let locator: GeoIpLocator
|
|
16
|
-
let testServer: TestServer
|
|
17
|
-
|
|
18
|
-
const getDbDir = () => {
|
|
19
|
-
dirCounter++
|
|
20
|
-
return dbPath + '/geolitelocator2-intervals' + dirCounter
|
|
21
|
-
}
|
|
22
|
-
|
|
23
|
-
beforeAll(async () => {
|
|
24
|
-
testServer = new TestServer()
|
|
25
|
-
await testServer.start(serverPort)
|
|
26
|
-
}, 120000)
|
|
27
|
-
|
|
28
|
-
afterEach(async () => {
|
|
29
|
-
locator!.stop()
|
|
30
|
-
testServer!.stop()
|
|
31
|
-
fs.rmSync(dbDir!, { recursive: true })
|
|
32
|
-
})
|
|
33
|
-
|
|
34
|
-
it('schedules a new check with a diffrent interval if monthly database check fails', async () => {
|
|
35
|
-
|
|
36
|
-
dbDir = getDbDir()
|
|
37
|
-
|
|
38
|
-
try {
|
|
39
|
-
fs.unlinkSync(dbDir + '/' + DB_FILENAME)
|
|
40
|
-
} catch {
|
|
41
|
-
// ignore
|
|
42
|
-
}
|
|
43
|
-
|
|
44
|
-
locator = new GeoIpLocator(dbDir, 3000, 1000, mirrorUrl)
|
|
45
|
-
|
|
46
|
-
// start locator normally
|
|
47
|
-
await locator.start()
|
|
48
|
-
|
|
49
|
-
// delete the db
|
|
50
|
-
try {
|
|
51
|
-
fs.unlinkSync(dbDir + '/' + DB_FILENAME)
|
|
52
|
-
} catch {
|
|
53
|
-
// ignore
|
|
54
|
-
}
|
|
55
|
-
|
|
56
|
-
// mock fetch to fail
|
|
57
|
-
const fetchMock = jest
|
|
58
|
-
.spyOn(globalThis, 'fetch')
|
|
59
|
-
.mockImplementation(async () => {
|
|
60
|
-
throw new Error('API is down')
|
|
61
|
-
})
|
|
62
|
-
|
|
63
|
-
// wait for the first check to happen
|
|
64
|
-
await wait(3500)
|
|
65
|
-
|
|
66
|
-
// normal check interval should have been run
|
|
67
|
-
// after 3000ms, this should have tried
|
|
68
|
-
// downloading the hash, but failed
|
|
69
|
-
|
|
70
|
-
expect(fetchMock).toHaveBeenCalledTimes(1)
|
|
71
|
-
|
|
72
|
-
// wait for the failure interval to happen
|
|
73
|
-
await wait(1200)
|
|
74
|
-
|
|
75
|
-
// failure interval should have been run after 1500ms from the failure
|
|
76
|
-
// it should have tried downloading the hash again and
|
|
77
|
-
// failed
|
|
78
|
-
|
|
79
|
-
expect(fetchMock).toHaveBeenCalledTimes(2)
|
|
80
|
-
|
|
81
|
-
// restore fetch
|
|
82
|
-
fetchMock.mockRestore()
|
|
83
|
-
|
|
84
|
-
// mock fetch again to just count the calls
|
|
85
|
-
const fetchMock2 = jest
|
|
86
|
-
.spyOn(globalThis, 'fetch')
|
|
87
|
-
|
|
88
|
-
// wait for failure interval to happen
|
|
89
|
-
await wait(1200)
|
|
90
|
-
|
|
91
|
-
// failure interval should have downloaded
|
|
92
|
-
// both the hash and the db
|
|
93
|
-
|
|
94
|
-
expect(fetchMock2).toHaveBeenCalledTimes(2)
|
|
95
|
-
|
|
96
|
-
// expect the db to be there
|
|
97
|
-
await until(() => fs.existsSync(dbDir + '/' + DB_FILENAME), 10000)
|
|
98
|
-
|
|
99
|
-
// helsinki.fi
|
|
100
|
-
const location = locator.lookup('128.214.222.50')
|
|
101
|
-
expect(location).toBeDefined()
|
|
102
|
-
|
|
103
|
-
// Helsinki, Finland
|
|
104
|
-
expect(location!.latitude).toBeCloseTo(60.1719, 1)
|
|
105
|
-
expect(location!.longitude).toBeCloseTo(24.9347, 1)
|
|
106
|
-
}, 60000)
|
|
107
|
-
})
|
|
@@ -1,55 +0,0 @@
|
|
|
1
|
-
import { GeoIpLocator } from '../../src/GeoIpLocator'
|
|
2
|
-
import fs from 'fs'
|
|
3
|
-
import { wait } from '@streamr/utils'
|
|
4
|
-
import { TestServer } from '../helpers/TestServer'
|
|
5
|
-
|
|
6
|
-
describe('GeoIpLocatorNoNetworkAtMonthly', () => {
|
|
7
|
-
let dirCounter = 0
|
|
8
|
-
const dbPath = '/tmp'
|
|
9
|
-
const serverPort = 31990
|
|
10
|
-
const serverUrl = 'http://localhost:' + serverPort + '/'
|
|
11
|
-
|
|
12
|
-
let testServer: TestServer
|
|
13
|
-
let dbDir: string
|
|
14
|
-
let locator: GeoIpLocator
|
|
15
|
-
|
|
16
|
-
const getDbDir = () => {
|
|
17
|
-
dirCounter++
|
|
18
|
-
return dbPath + '/geolite2-no-nw-monthly' + dirCounter
|
|
19
|
-
}
|
|
20
|
-
|
|
21
|
-
beforeAll(async () => {
|
|
22
|
-
testServer = new TestServer()
|
|
23
|
-
await testServer.start(serverPort)
|
|
24
|
-
dbDir = getDbDir()
|
|
25
|
-
locator = new GeoIpLocator(dbDir, 5000, 10000, serverUrl)
|
|
26
|
-
await locator.start()
|
|
27
|
-
}, 120000)
|
|
28
|
-
|
|
29
|
-
afterAll(async () => {
|
|
30
|
-
locator!.stop()
|
|
31
|
-
testServer!.stop()
|
|
32
|
-
fs.unlinkSync(dbDir + '/GeoLite2-City.mmdb')
|
|
33
|
-
fs.rmSync(dbDir!, { recursive: true })
|
|
34
|
-
})
|
|
35
|
-
|
|
36
|
-
it('does not crash if monthly database check fails because of fetch returning garbage', async () => {
|
|
37
|
-
const oldFetch = globalThis.fetch
|
|
38
|
-
const fetchMock = jest
|
|
39
|
-
.spyOn(globalThis, 'fetch')
|
|
40
|
-
.mockImplementation(() => oldFetch('https://streamr.network'))
|
|
41
|
-
|
|
42
|
-
await wait(10000)
|
|
43
|
-
|
|
44
|
-
fetchMock.mockRestore()
|
|
45
|
-
|
|
46
|
-
// helsinki.fi
|
|
47
|
-
const location = locator!.lookup('128.214.222.50')
|
|
48
|
-
expect(location).toBeDefined()
|
|
49
|
-
|
|
50
|
-
// Helsinki, Finland
|
|
51
|
-
expect(location!.latitude).toBeCloseTo(60.1719, 1)
|
|
52
|
-
expect(location!.longitude).toBeCloseTo(24.9347, 1)
|
|
53
|
-
|
|
54
|
-
}, 60000)
|
|
55
|
-
})
|
|
@@ -1,29 +0,0 @@
|
|
|
1
|
-
import { GeoIpLocator } from '../../src/GeoIpLocator'
|
|
2
|
-
|
|
3
|
-
describe('GeoIpLocator', () => {
|
|
4
|
-
let dirCounter = 0
|
|
5
|
-
const dbPath = '/tmp'
|
|
6
|
-
|
|
7
|
-
const getDbDir = () => {
|
|
8
|
-
dirCounter++
|
|
9
|
-
return dbPath + '/geolite2-no-nw-start' + dirCounter
|
|
10
|
-
}
|
|
11
|
-
|
|
12
|
-
it('start throws if no network connectivity', async () => {
|
|
13
|
-
const dbDir = getDbDir()
|
|
14
|
-
|
|
15
|
-
const fetchMock = jest
|
|
16
|
-
.spyOn(globalThis, 'fetch')
|
|
17
|
-
.mockImplementation(async () => {
|
|
18
|
-
throw new Error('API is down')
|
|
19
|
-
})
|
|
20
|
-
|
|
21
|
-
const locator = new GeoIpLocator(dbDir)
|
|
22
|
-
|
|
23
|
-
await expect(locator.start()).rejects.toThrow()
|
|
24
|
-
|
|
25
|
-
fetchMock.mockRestore()
|
|
26
|
-
|
|
27
|
-
locator.stop()
|
|
28
|
-
})
|
|
29
|
-
})
|
|
@@ -1,127 +0,0 @@
|
|
|
1
|
-
import { GeoIpLocator } from '../../src/GeoIpLocator'
|
|
2
|
-
import fs from 'fs'
|
|
3
|
-
import { wait } from '@streamr/utils'
|
|
4
|
-
import { TestServer } from '../helpers/TestServer'
|
|
5
|
-
|
|
6
|
-
describe('GeoIpLocator', () => {
|
|
7
|
-
let testServer: TestServer
|
|
8
|
-
let dirCounter = 0
|
|
9
|
-
const dbPath = '/tmp'
|
|
10
|
-
const serverPort = 31992
|
|
11
|
-
const serverUrl = 'http://127.0.0.1:' + serverPort + '/'
|
|
12
|
-
|
|
13
|
-
const getDbDir = () => {
|
|
14
|
-
dirCounter++
|
|
15
|
-
return dbPath + '/geolite2-' + dirCounter
|
|
16
|
-
}
|
|
17
|
-
|
|
18
|
-
beforeAll(async () => {
|
|
19
|
-
testServer = new TestServer()
|
|
20
|
-
await testServer.start(serverPort)
|
|
21
|
-
}, 120000)
|
|
22
|
-
|
|
23
|
-
afterAll(async () => {
|
|
24
|
-
testServer!.stop()
|
|
25
|
-
})
|
|
26
|
-
|
|
27
|
-
describe('tests with normal startup and shutdown', () => {
|
|
28
|
-
let dbDir: string
|
|
29
|
-
let locator: GeoIpLocator
|
|
30
|
-
|
|
31
|
-
it('can locate an IP address', async () => {
|
|
32
|
-
dbDir = getDbDir()
|
|
33
|
-
locator = new GeoIpLocator(dbDir, 5000, 5000, serverUrl)
|
|
34
|
-
await locator.start()
|
|
35
|
-
|
|
36
|
-
// helsinki.fi
|
|
37
|
-
const location = locator.lookup('128.214.222.50')
|
|
38
|
-
|
|
39
|
-
expect(location).toBeDefined()
|
|
40
|
-
|
|
41
|
-
// Helsinki, Finland
|
|
42
|
-
expect(location!.latitude).toBeCloseTo(60.1719, 1)
|
|
43
|
-
expect(location!.longitude).toBeCloseTo(24.9347, 1)
|
|
44
|
-
|
|
45
|
-
locator.stop()
|
|
46
|
-
fs.unlinkSync(dbDir + '/GeoLite2-City.mmdb')
|
|
47
|
-
fs.rmSync(dbDir, { recursive: true })
|
|
48
|
-
})
|
|
49
|
-
|
|
50
|
-
it('returns undefined with invalid IP address', async () => {
|
|
51
|
-
dbDir = getDbDir()
|
|
52
|
-
locator = new GeoIpLocator(dbDir, 5000, 5000, serverUrl)
|
|
53
|
-
await locator.start()
|
|
54
|
-
|
|
55
|
-
expect(locator.lookup('invalid')).toBeUndefined()
|
|
56
|
-
expect(locator.lookup('')).toBeUndefined()
|
|
57
|
-
expect(locator.lookup(undefined as unknown as string)).toBeUndefined()
|
|
58
|
-
expect(locator.lookup(null as unknown as string)).toBeUndefined()
|
|
59
|
-
expect(locator.lookup('127.0.0.1')).toBeUndefined()
|
|
60
|
-
|
|
61
|
-
locator.stop()
|
|
62
|
-
fs.unlinkSync(dbDir + '/GeoLite2-City.mmdb')
|
|
63
|
-
fs.rmSync(dbDir, { recursive: true })
|
|
64
|
-
})
|
|
65
|
-
|
|
66
|
-
it('works also after monthly check', async () => {
|
|
67
|
-
dbDir = getDbDir()
|
|
68
|
-
locator = new GeoIpLocator(dbDir, 5000, 5000, serverUrl)
|
|
69
|
-
await locator.start()
|
|
70
|
-
|
|
71
|
-
await wait(7000)
|
|
72
|
-
|
|
73
|
-
// helsinki.fi
|
|
74
|
-
const location = locator.lookup('128.214.222.50')
|
|
75
|
-
expect(location).toBeDefined()
|
|
76
|
-
|
|
77
|
-
// Helsinki, Finland
|
|
78
|
-
expect(location!.latitude).toBeCloseTo(60.1719, 1)
|
|
79
|
-
expect(location!.longitude).toBeCloseTo(24.9347, 1)
|
|
80
|
-
|
|
81
|
-
locator.stop()
|
|
82
|
-
fs.unlinkSync(dbDir + '/GeoLite2-City.mmdb')
|
|
83
|
-
fs.rmSync(dbDir, { recursive: true })
|
|
84
|
-
}, 60000)
|
|
85
|
-
|
|
86
|
-
it('works also after monthly check if db gets deleted before the check', async () => {
|
|
87
|
-
dbDir = getDbDir()
|
|
88
|
-
locator = new GeoIpLocator(dbDir, 5000, 5000, serverUrl)
|
|
89
|
-
await locator.start()
|
|
90
|
-
|
|
91
|
-
fs.unlinkSync(dbDir + '/GeoLite2-City.mmdb')
|
|
92
|
-
|
|
93
|
-
await wait(10000)
|
|
94
|
-
|
|
95
|
-
// helsinki.fi
|
|
96
|
-
const location = locator.lookup('128.214.222.50')
|
|
97
|
-
expect(location).toBeDefined()
|
|
98
|
-
|
|
99
|
-
// Helsinki, Finland
|
|
100
|
-
expect(location!.latitude).toBeCloseTo(60.1719, 1)
|
|
101
|
-
expect(location!.longitude).toBeCloseTo(24.9347, 1)
|
|
102
|
-
|
|
103
|
-
locator.stop()
|
|
104
|
-
fs.unlinkSync(dbDir + '/GeoLite2-City.mmdb')
|
|
105
|
-
fs.rmSync(dbDir, { recursive: true })
|
|
106
|
-
}, 60000)
|
|
107
|
-
})
|
|
108
|
-
|
|
109
|
-
describe('tests with failing startup', () => {
|
|
110
|
-
it('returns undefined if not started', async () => {
|
|
111
|
-
const dbDir = getDbDir()
|
|
112
|
-
const locator = new GeoIpLocator(dbDir)
|
|
113
|
-
const location = locator.lookup('128.214.222.50')
|
|
114
|
-
expect(location).toBeUndefined()
|
|
115
|
-
})
|
|
116
|
-
|
|
117
|
-
it('start() throws if database path does not exist', async () => {
|
|
118
|
-
const locator = new GeoIpLocator('/nonexistent')
|
|
119
|
-
await expect(locator.start()).rejects.toThrow()
|
|
120
|
-
})
|
|
121
|
-
|
|
122
|
-
it('start() throws if database path is not writable', async () => {
|
|
123
|
-
const locator = new GeoIpLocator('/etc')
|
|
124
|
-
await expect(locator.start()).rejects.toThrow()
|
|
125
|
-
})
|
|
126
|
-
})
|
|
127
|
-
})
|
|
@@ -1,66 +0,0 @@
|
|
|
1
|
-
import { downloadGeoIpDatabase } from '../../src/downloadGeoIpDatabase'
|
|
2
|
-
import fs from 'fs'
|
|
3
|
-
import { TestServer } from '../helpers/TestServer'
|
|
4
|
-
|
|
5
|
-
describe('downloadGeoIpDatabase', () => {
|
|
6
|
-
const serverPort = 31993
|
|
7
|
-
const mirrorUrl = 'http://127.0.0.1:' + serverPort + '/'
|
|
8
|
-
|
|
9
|
-
let testServer: TestServer
|
|
10
|
-
const abortController = new AbortController()
|
|
11
|
-
const path = '/tmp/downloadGeoIpDatabaseTest/'
|
|
12
|
-
|
|
13
|
-
beforeAll(async () => {
|
|
14
|
-
testServer = new TestServer()
|
|
15
|
-
await testServer.start(serverPort)
|
|
16
|
-
}, 120000)
|
|
17
|
-
|
|
18
|
-
afterAll(async () => {
|
|
19
|
-
testServer!.stop()
|
|
20
|
-
})
|
|
21
|
-
|
|
22
|
-
beforeEach(() => {
|
|
23
|
-
try {
|
|
24
|
-
fs.rmSync(path, { recursive: true })
|
|
25
|
-
} catch {
|
|
26
|
-
// ignore error when removing the test data
|
|
27
|
-
}
|
|
28
|
-
})
|
|
29
|
-
|
|
30
|
-
it('downloads the database with correct file permissions', async () => {
|
|
31
|
-
const reader = await downloadGeoIpDatabase(path, false, abortController.signal, mirrorUrl)
|
|
32
|
-
|
|
33
|
-
expect(fs.existsSync(path)).toBe(true)
|
|
34
|
-
expect(fs.existsSync(path + '/GeoLite2-City.mmdb')).toBe(true)
|
|
35
|
-
|
|
36
|
-
// https://www.martin-brennan.com/nodejs-file-permissions-fstat/
|
|
37
|
-
const permissions = fs.statSync(path + '/GeoLite2-City.mmdb').mode & 0o777
|
|
38
|
-
|
|
39
|
-
// on windows the permissions might be 0o666
|
|
40
|
-
expect(permissions === 0o600 || permissions === 0o666).toBe(true)
|
|
41
|
-
expect(reader).toBeDefined()
|
|
42
|
-
}, 60000)
|
|
43
|
-
|
|
44
|
-
it('throws if the path is not writable', async () => {
|
|
45
|
-
const path = '/etc/downloadGeoIpDatabaseTest/'
|
|
46
|
-
await expect(downloadGeoIpDatabase(path, false, abortController.signal, mirrorUrl)).rejects.toThrow()
|
|
47
|
-
}, 60000)
|
|
48
|
-
|
|
49
|
-
it('throws if the path does not exist', async () => {
|
|
50
|
-
const path = '/nonexistent/downloadGeoIpDatabaseTest/'
|
|
51
|
-
await expect(downloadGeoIpDatabase(path, false, abortController.signal, mirrorUrl)).rejects.toThrow()
|
|
52
|
-
}, 60000)
|
|
53
|
-
|
|
54
|
-
it('does not download the database if it is already up to date', async () => {
|
|
55
|
-
const path = '/tmp/downloadGeoIpDatabaseTest/'
|
|
56
|
-
|
|
57
|
-
const newReader = await downloadGeoIpDatabase(path, false, abortController.signal, mirrorUrl)
|
|
58
|
-
expect(newReader).toBeDefined()
|
|
59
|
-
|
|
60
|
-
const newReader2 = await downloadGeoIpDatabase(path, false, abortController.signal, mirrorUrl)
|
|
61
|
-
expect(newReader2).toBeUndefined()
|
|
62
|
-
|
|
63
|
-
const newReader3 = await downloadGeoIpDatabase(path, true, abortController.signal, mirrorUrl)
|
|
64
|
-
expect(newReader3).toBeDefined()
|
|
65
|
-
}, 60000)
|
|
66
|
-
})
|
|
@@ -1,92 +0,0 @@
|
|
|
1
|
-
import { waitForEvent3 } from '@streamr/utils'
|
|
2
|
-
import { extractFileFromTarStream } from '../../src/tarHelper'
|
|
3
|
-
import { TestServer, TestServerEvents } from '../helpers/TestServer'
|
|
4
|
-
|
|
5
|
-
describe('tarHelper', () => {
|
|
6
|
-
const serverUrl = 'http://127.0.0.1:'
|
|
7
|
-
const dbFileName = 'GeoLite2-City.mmdb'
|
|
8
|
-
const tarFileName = 'GeoLite2-City.tar.gz'
|
|
9
|
-
const hashFileName = 'GeoLite2-City.mmdb.sha384'
|
|
10
|
-
|
|
11
|
-
let testServer: TestServer
|
|
12
|
-
|
|
13
|
-
afterEach(async () => {
|
|
14
|
-
await testServer!.stop()
|
|
15
|
-
})
|
|
16
|
-
|
|
17
|
-
describe('testsWithNormalServer', () => {
|
|
18
|
-
const serverPort = 3197
|
|
19
|
-
|
|
20
|
-
beforeEach(async () => {
|
|
21
|
-
testServer = new TestServer()
|
|
22
|
-
await testServer.start(serverPort)
|
|
23
|
-
})
|
|
24
|
-
|
|
25
|
-
it('happy path', async () => {
|
|
26
|
-
const url = serverUrl + serverPort + '/' + tarFileName
|
|
27
|
-
const result = await fetch(url, { keepalive: false })
|
|
28
|
-
|
|
29
|
-
await extractFileFromTarStream(dbFileName, result.body!, '/tmp')
|
|
30
|
-
|
|
31
|
-
})
|
|
32
|
-
|
|
33
|
-
it('throws asynchonously if the stream contains garbage', async () => {
|
|
34
|
-
const url = serverUrl + serverPort + '/' + hashFileName
|
|
35
|
-
const result = await fetch(url)
|
|
36
|
-
|
|
37
|
-
await expect(extractFileFromTarStream(dbFileName, result.body!, '/tmp'))
|
|
38
|
-
.rejects
|
|
39
|
-
.toThrow('TAR_BAD_ARCHIVE: Unrecognized archive format')
|
|
40
|
-
|
|
41
|
-
})
|
|
42
|
-
|
|
43
|
-
it('throws asynchonously if the stream does not contain the desired file', async () => {
|
|
44
|
-
const url = serverUrl + serverPort + '/' + tarFileName
|
|
45
|
-
const result = await fetch(url)
|
|
46
|
-
|
|
47
|
-
await expect(extractFileFromTarStream('nonexisting-filename', result.body!, '/tmp'))
|
|
48
|
-
.rejects
|
|
49
|
-
.toThrow('File not found in tarball: nonexisting-filename')
|
|
50
|
-
|
|
51
|
-
})
|
|
52
|
-
})
|
|
53
|
-
|
|
54
|
-
describe('testsWithThrottledServer', () => {
|
|
55
|
-
const serverPort = 3198
|
|
56
|
-
|
|
57
|
-
beforeEach(async () => {
|
|
58
|
-
testServer = new TestServer()
|
|
59
|
-
await testServer.start(serverPort, 1)
|
|
60
|
-
})
|
|
61
|
-
|
|
62
|
-
it('throws asynchonously if the stream gets aborted', async () => {
|
|
63
|
-
const abortController = new AbortController()
|
|
64
|
-
|
|
65
|
-
setTimeout(() => {
|
|
66
|
-
abortController.abort()
|
|
67
|
-
}, 5000)
|
|
68
|
-
|
|
69
|
-
const url = serverUrl + serverPort + '/' + tarFileName
|
|
70
|
-
const result = await fetch(url, { signal: abortController.signal })
|
|
71
|
-
|
|
72
|
-
await expect(extractFileFromTarStream(dbFileName, result.body!, '/tmp'))
|
|
73
|
-
.rejects
|
|
74
|
-
.toThrow('AbortError: This operation was aborted')
|
|
75
|
-
|
|
76
|
-
}, 15 * 1000)
|
|
77
|
-
|
|
78
|
-
it('throws asynchonously if server gets shut down', async () => {
|
|
79
|
-
const closedPromise = waitForEvent3<TestServerEvents>(testServer!, 'closed', 10000)
|
|
80
|
-
setTimeout(async () => {
|
|
81
|
-
await testServer!.stop()
|
|
82
|
-
}, 5000)
|
|
83
|
-
|
|
84
|
-
const url = serverUrl + serverPort + '/' + tarFileName
|
|
85
|
-
const result = await fetch(url)
|
|
86
|
-
await expect(extractFileFromTarStream(dbFileName, result.body!, '/tmp'))
|
|
87
|
-
.rejects
|
|
88
|
-
.toThrow('Error extracting tarball')
|
|
89
|
-
await closedPromise
|
|
90
|
-
}, 15 * 1000)
|
|
91
|
-
})
|
|
92
|
-
})
|
package/tsconfig.browser.json
DELETED
package/tsconfig.jest.json
DELETED
package/tsconfig.json
DELETED
package/tsconfig.node.json
DELETED