@betterdb/monitor 0.4.2 → 0.4.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/assets/build-info.json +5 -0
- package/assets/server/index.js +112 -0
- package/assets/server/index1.js +86 -0
- package/assets/server/worker.js +179 -0
- package/assets/web/assets/index-B-gq6Czz.js +312 -0
- package/assets/web/assets/index-g1z8DoV3.css +1 -0
- package/assets/web/index.html +14 -0
- package/assets/web/site.webmanifest +19 -0
- package/assets/web/symbol-white.svg +1 -0
- package/dist/index.js +749 -0
- package/package.json +1 -1
package/assets/server/index1.js
@@ -0,0 +1,86 @@
+/**
+Node.js specific entry point.
+*/
+
+import {ReadableStream as WebReadableStream} from 'node:stream/web';
+import {pipeline, PassThrough, Readable} from 'node:stream';
+import * as strtok3 from 'strtok3';
+import {FileTypeParser as DefaultFileTypeParser, reasonableDetectionSizeInBytes} from './core.js';
+
+export class FileTypeParser extends DefaultFileTypeParser {
+  async fromStream(stream) {
+    const tokenizer = await (stream instanceof WebReadableStream ? strtok3.fromWebStream(stream, this.tokenizerOptions) : strtok3.fromStream(stream, this.tokenizerOptions));
+    try {
+      return await super.fromTokenizer(tokenizer);
+    } finally {
+      await tokenizer.close();
+    }
+  }
+
+  async fromFile(path) {
+    const tokenizer = await strtok3.fromFile(path);
+    try {
+      return await super.fromTokenizer(tokenizer);
+    } finally {
+      await tokenizer.close();
+    }
+  }
+
+  async toDetectionStream(readableStream, options = {}) {
+    if (!(readableStream instanceof Readable)) {
+      return super.toDetectionStream(readableStream, options);
+    }
+
+    const {sampleSize = reasonableDetectionSizeInBytes} = options;
+
+    return new Promise((resolve, reject) => {
+      readableStream.on('error', reject);
+
+      readableStream.once('readable', () => {
+        (async () => {
+          try {
+            // Set up output stream
+            const pass = new PassThrough();
+            const outputStream = pipeline ? pipeline(readableStream, pass, () => {}) : readableStream.pipe(pass);
+
+            // Read the input stream and detect the filetype
+            const chunk = readableStream.read(sampleSize) ?? readableStream.read() ?? new Uint8Array(0);
+            try {
+              pass.fileType = await this.fromBuffer(chunk);
+            } catch (error) {
+              if (error instanceof strtok3.EndOfStreamError) {
+                pass.fileType = undefined;
+              } else {
+                reject(error);
+              }
+            }
+
+            resolve(outputStream);
+          } catch (error) {
+            reject(error);
+          }
+        })();
+      });
+    });
+  }
+}
+
+export async function fileTypeFromFile(path, options) {
+  return (new FileTypeParser(options)).fromFile(path, options);
+}
+
+export async function fileTypeFromStream(stream, options) {
+  return (new FileTypeParser(options)).fromStream(stream);
+}
+
+export async function fileTypeStream(readableStream, options = {}) {
+  return new FileTypeParser(options).toDetectionStream(readableStream, options);
+}
+
+export {
+  fileTypeFromTokenizer,
+  fileTypeFromBuffer,
+  fileTypeFromBlob,
+  supportedMimeTypes,
+  supportedExtensions,
+} from './core.js';
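The hunk above (which the file list maps to package/assets/server/index1.js, +86) appears to be the Node.js entry point of the file-type library: it wraps strtok3 tokenizers and exposes fromFile/fromStream plus a stream wrapper that attaches the detected type to a PassThrough. As a rough, hypothetical usage sketch only, assuming this file is importable as './index1.js' and that its './core.js' and strtok3 dependencies are present alongside it, a consumer could do:

// Illustrative sketch, not code from the package.
import {createReadStream} from 'node:fs';
import {fileTypeFromFile, fileTypeStream} from './index1.js';

// Detect from a file path; resolves to something like {ext: 'png', mime: 'image/png'},
// or undefined when the type cannot be determined.
console.log(await fileTypeFromFile('sample.bin'));

// Wrap a Node Readable: the returned stream carries a .fileType property detected
// from the first sampled bytes while the data keeps flowing through it.
const detection = await fileTypeStream(createReadStream('sample.bin'), {sampleSize: 4100});
console.log(detection.fileType);
detection.pipe(process.stdout);

(sampleSize is an arbitrary example value here; the code above defaults to reasonableDetectionSizeInBytes.)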

package/assets/server/worker.js
@@ -0,0 +1,179 @@
+'use strict'
+
+const { realImport, realRequire } = require('real-require')
+const { workerData, parentPort } = require('worker_threads')
+const { WRITE_INDEX, READ_INDEX } = require('./indexes')
+const { waitDiff } = require('./wait')
+
+const {
+  dataBuf,
+  filename,
+  stateBuf
+} = workerData
+
+let destination
+
+const state = new Int32Array(stateBuf)
+const data = Buffer.from(dataBuf)
+
+// Keep the event loop alive - Atomics.waitAsync promises don't prevent worker exit
+const keepAlive = setInterval(() => {}, 60 * 60 * 1000)
+
+async function start () {
+  let worker
+  try {
+    worker = (await realImport(filename))
+  } catch (error) {
+    // A yarn user that tries to start a ThreadStream for an external module
+    // provides a filename pointing to a zip file.
+    // eg. require.resolve('pino-elasticsearch') // returns /foo/pino-elasticsearch-npm-6.1.0-0c03079478-6915435172.zip/bar.js
+    // The `import` will fail to try to load it.
+    // This catch block executes the `require` fallback to load the module correctly.
+    // In fact, yarn modifies the `require` function to manage the zipped path.
+    // More details at https://github.com/pinojs/pino/pull/1113
+    // The error codes may change based on the node.js version (ENOTDIR > 12, ERR_MODULE_NOT_FOUND <= 12 )
+    if ((error.code === 'ENOTDIR' || error.code === 'ERR_MODULE_NOT_FOUND') &&
+      filename.startsWith('file://')) {
+      worker = realRequire(decodeURIComponent(filename.replace('file://', '')))
+    } else if (error.code === undefined || error.code === 'ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING') {
+      // When bundled with pkg, an undefined error is thrown when called with realImport
+      // When bundled with pkg and using node v20, an ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING error is thrown when called with realImport
+      // More info at: https://github.com/pinojs/thread-stream/issues/143
+      try {
+        worker = realRequire(decodeURIComponent(filename.replace(process.platform === 'win32' ? 'file:///' : 'file://', '')))
+      } catch {
+        throw error
+      }
+    } else if (filename.endsWith('.ts') || filename.endsWith('.cts')) {
+      // Native TypeScript import failed (type stripping not enabled).
+      // Fall back to ts-node for TypeScript files.
+      try {
+        if (!process[Symbol.for('ts-node.register.instance')]) {
+          realRequire('ts-node/register')
+        } else if (process.env.TS_NODE_DEV) {
+          realRequire('ts-node-dev')
+        }
+        worker = realRequire(decodeURIComponent(filename.replace(process.platform === 'win32' ? 'file:///' : 'file://', '')))
+      } catch {
+        throw error
+      }
+    } else {
+      throw error
+    }
+  }
+
+  // Depending on how the default export is performed, and on how the code is
+  // transpiled, we may find cases of two nested "default" objects.
+  // See https://github.com/pinojs/pino/issues/1243#issuecomment-982774762
+  if (typeof worker === 'object') worker = worker.default
+  if (typeof worker === 'object') worker = worker.default
+
+  destination = await worker(workerData.workerData)
+
+  destination.on('error', function (err) {
+    Atomics.store(state, WRITE_INDEX, -2)
+    Atomics.notify(state, WRITE_INDEX)
+
+    Atomics.store(state, READ_INDEX, -2)
+    Atomics.notify(state, READ_INDEX)
+
+    parentPort.postMessage({
+      code: 'ERROR',
+      err
+    })
+  })
+
+  destination.on('close', function () {
+    // process._rawDebug('worker close emitted')
+    const end = Atomics.load(state, WRITE_INDEX)
+    Atomics.store(state, READ_INDEX, end)
+    Atomics.notify(state, READ_INDEX)
+    clearInterval(keepAlive)
+    setImmediate(() => {
+      process.exit(0)
+    })
+  })
+}

+// No .catch() handler,
+// in case there is an error it goes
+// to unhandledRejection
+start().then(function () {
+  parentPort.postMessage({
+    code: 'READY'
+  })
+
+  process.nextTick(run)
+})
+
+function run () {
+  const current = Atomics.load(state, READ_INDEX)
+  const end = Atomics.load(state, WRITE_INDEX)
+
+  // process._rawDebug(`pre state ${current} ${end}`)
+
+  if (end === current) {
+    if (end === data.length) {
+      waitDiff(state, READ_INDEX, end, Infinity, run)
+    } else {
+      waitDiff(state, WRITE_INDEX, end, Infinity, run)
+    }
+    return
+  }
+
+  // process._rawDebug(`post state ${current} ${end}`)
+
+  if (end === -1) {
+    // process._rawDebug('end')
+    destination.end()
+    return
+  }
+
+  const toWrite = data.toString('utf8', current, end)
+  // process._rawDebug('worker writing: ' + toWrite)
+
+  const res = destination.write(toWrite)
+
+  if (res) {
+    Atomics.store(state, READ_INDEX, end)
+    Atomics.notify(state, READ_INDEX)
+    setImmediate(run)
+  } else {
+    destination.once('drain', function () {
+      Atomics.store(state, READ_INDEX, end)
+      Atomics.notify(state, READ_INDEX)
+      run()
+    })
+  }
+}
+
+process.on('unhandledRejection', function (err) {
+  parentPort.postMessage({
+    code: 'ERROR',
+    err
+  })
+  process.exit(1)
+})
+
+process.on('uncaughtException', function (err) {
+  parentPort.postMessage({
+    code: 'ERROR',
+    err
+  })
+  process.exit(1)
+})
+
+process.once('exit', exitCode => {
+  if (exitCode !== 0) {
+    process.exit(exitCode)
+    return
+  }
+  if (destination?.writableNeedDrain && !destination?.writableEnded) {
+    parentPort.postMessage({
+      code: 'WARNING',
+      err: new Error('ThreadStream: process exited before destination stream was drained. this may indicate that the destination stream try to write to a another missing stream')
+    })
+  }
+
+  process.exit(0)
+})
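
The second hunk (mapped by the file list to package/assets/server/worker.js, +179) follows the worker side of pino's thread-stream protocol: the main thread publishes UTF-8 data into a shared buffer through READ_INDEX/WRITE_INDEX slots of a shared Int32Array, and the worker drains that data into the destination stream, acknowledging by advancing READ_INDEX. A minimal, self-contained sketch of that Atomics handshake is below; the index slots 0/1, the buffer size, and the end-of-stream timing are assumptions for illustration, and this is not the package's main-thread code.

'use strict'
// Illustrative sketch of the READ_INDEX / WRITE_INDEX handshake, not package code.
const { Worker, isMainThread, workerData } = require('worker_threads')

const READ_INDEX = 0   // assumed slot: next byte offset the consumer will read
const WRITE_INDEX = 1  // assumed slot: one past the last byte the producer wrote

if (isMainThread) {
  const stateBuf = new SharedArrayBuffer(8)
  const dataBuf = new SharedArrayBuffer(4096)
  const state = new Int32Array(stateBuf)
  const data = Buffer.from(dataBuf)

  new Worker(__filename, { workerData: { stateBuf, dataBuf } })
    .on('exit', () => console.log('consumer finished'))

  // Producer: copy a chunk into the shared buffer, publish the new write offset,
  // and wake any thread parked on WRITE_INDEX.
  const chunk = 'hello from the main thread\n'
  data.write(chunk, 0, 'utf8')
  Atomics.store(state, WRITE_INDEX, Buffer.byteLength(chunk))
  Atomics.notify(state, WRITE_INDEX)

  // Signal end-of-stream the same way worker.js checks for it: WRITE_INDEX === -1.
  setTimeout(() => {
    Atomics.store(state, WRITE_INDEX, -1)
    Atomics.notify(state, WRITE_INDEX)
  }, 100)
} else {
  const state = new Int32Array(workerData.stateBuf)
  const data = Buffer.from(workerData.dataBuf)
  for (;;) {
    const current = Atomics.load(state, READ_INDEX)
    const end = Atomics.load(state, WRITE_INDEX)
    if (end === -1) break                      // producer signalled end-of-stream
    if (end === current) {                     // nothing new: block until WRITE_INDEX changes
      Atomics.wait(state, WRITE_INDEX, end)
      continue
    }
    process.stdout.write(data.toString('utf8', current, end))
    Atomics.store(state, READ_INDEX, end)      // acknowledge what was consumed
    Atomics.notify(state, READ_INDEX)
  }
}

Unlike this blocking sketch, the worker in the hunk uses waitDiff (an async wait helper) plus a keep-alive interval, so its event loop stays responsive while it waits for new data.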