@rharkor/caching-for-turbo 2.3.1 → 2.3.3

This diff shows the changes between two publicly released versions of the package, as published to its public registry. It is provided for informational purposes only.
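For verification, the same comparison can be generated locally with the npm CLI (npm 7 and later ship the diff subcommand):

npm diff --diff=@rharkor/caching-for-turbo@2.3.1 --diff=@rharkor/caching-for-turbo@2.3.3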
@@ -1,194 +1,174 @@
  'use strict'
 
- const EE = require('events')
- const { pipeline, PassThrough } = require('stream')
- const pino = require('../pino.js')
- const build = require('pino-abstract-transport')
- const loadTransportStreamBuilder = require('./transport-stream')
-
- // This file is not checked by the code coverage tool,
- // as it is not reliable.
-
- /* istanbul ignore file */
-
- /*
- * > Multiple targets & pipelines
- *
- *
- * ┌─────────────────────────────────────────────────┐ ┌─────┐
- * │ │ │ p │
- * │ │ │ i │
- * │ target │ │ n │
- * │ │ ────────────────────────────────┼────┤ o │
- * │ targets │ target │ │ .
- * │ ────────────► │ ────────────────────────────────┼────┤ m │ source
- * │ │ target │ │ u │ │
- * │ │ ────────────────────────────────┼────┤ l │ │write
- * │ │ │ │ t │ ▼
- * │ │ pipeline ┌───────────────┐ │ │ i │ ┌────────┐
- * │ │ ──────────► │ PassThrough ├───┼────┤ s ├──────┤ │
- * │ │ └───────────────┘ │ │ t │ write│ Thread │
- * │ │ │ │ r │◄─────┤ Stream │
- * │ │ pipeline ┌───────────────┐ │ │ e │ │ │
- * │ │ ──────────► │ PassThrough ├───┼────┤ a │ └────────┘
- * │ └───────────────┘ │ │ m │
- * │ │ │ │
- * └─────────────────────────────────────────────────┘ └─────┘
- *
- *
- *
- * > One single pipeline or target
- *
- *
- * source
- * │
- * ┌────────────────────────────────────────────────┐ │write
- * │ │ ▼
- * │ │ ┌────────┐
- * │ targets │ target │ │ │
- * │ ────────────► │ ──────────────────────────────┤ │ │
- * │ │ │ │ │
- * │ ├──────┤ │
- * │ │ │ │
- * │ │ │ │
- * │ OR │ │ │
- * │ │ │ │
- * │ │ │ │
- * │ ┌──────────────┐ │ │ │
- * │ targets │ pipeline │ │ │ │ Thread │
- * │ ────────────► │ ────────────►│ PassThrough ├─┤ │ Stream │
- * │ │ │ │ │ │ │
- * │ └──────────────┘ │ │ │
- * │ │ │ │
- * │ OR │ write│ │
- * │ │◄─────┤ │
- * │ │ │ │
- * │ ┌──────────────┐ │ │ │
- * │ pipeline │ │ │ │ │
- * │ ──────────────►│ PassThrough ├────────────────┤ │ │
- * │ │ │ │ │ │
- * │ └──────────────┘ │ └────────┘
- * │ │
- * │ │
- * └────────────────────────────────────────────────┘
- */
-
- module.exports = async function ({ targets, pipelines, levels, dedupe }) {
- const targetStreams = []
-
- // Process targets
- if (targets && targets.length) {
- targets = await Promise.all(targets.map(async (t) => {
- const fn = await loadTransportStreamBuilder(t.target)
- const stream = await fn(t.options)
- return {
- level: t.level,
- stream
+ const { realImport, realRequire } = require('real-require')
+ const { workerData, parentPort } = require('worker_threads')
+ const { WRITE_INDEX, READ_INDEX } = require('./indexes')
+ const { waitDiff } = require('./wait')
+
+ const {
+ dataBuf,
+ filename,
+ stateBuf
+ } = workerData
+
+ let destination
+
+ const state = new Int32Array(stateBuf)
+ const data = Buffer.from(dataBuf)
+
+ async function start () {
+ let worker
+ try {
+ if (filename.endsWith('.ts') || filename.endsWith('.cts')) {
+ // TODO: add support for the TSM modules loader ( https://github.com/lukeed/tsm ).
+ if (!process[Symbol.for('ts-node.register.instance')]) {
+ realRequire('ts-node/register')
+ } else if (process.env.TS_NODE_DEV) {
+ realRequire('ts-node-dev')
  }
- }))
-
- targetStreams.push(...targets)
+ // TODO: Support ES imports once tsc, tap & ts-node provide better compatibility guarantees.
+ // Remove extra forwardslash on Windows
+ worker = realRequire(decodeURIComponent(filename.replace(process.platform === 'win32' ? 'file:///' : 'file://', '')))
+ } else {
+ worker = (await realImport(filename))
+ }
+ } catch (error) {
+ // A yarn user that tries to start a ThreadStream for an external module
+ // provides a filename pointing to a zip file.
+ // eg. require.resolve('pino-elasticsearch') // returns /foo/pino-elasticsearch-npm-6.1.0-0c03079478-6915435172.zip/bar.js
+ // The `import` will fail to try to load it.
+ // This catch block executes the `require` fallback to load the module correctly.
+ // In fact, yarn modifies the `require` function to manage the zipped path.
+ // More details at https://github.com/pinojs/pino/pull/1113
+ // The error codes may change based on the node.js version (ENOTDIR > 12, ERR_MODULE_NOT_FOUND <= 12 )
+ if ((error.code === 'ENOTDIR' || error.code === 'ERR_MODULE_NOT_FOUND') &&
+ filename.startsWith('file://')) {
+ worker = realRequire(decodeURIComponent(filename.replace('file://', '')))
+ } else if (error.code === undefined || error.code === 'ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING') {
+ // When bundled with pkg, an undefined error is thrown when called with realImport
+ // When bundled with pkg and using node v20, an ERR_VM_DYNAMIC_IMPORT_CALLBACK_MISSING error is thrown when called with realImport
+ // More info at: https://github.com/pinojs/thread-stream/issues/143
+ try {
+ worker = realRequire(decodeURIComponent(filename.replace(process.platform === 'win32' ? 'file:///' : 'file://', '')))
+ } catch {
+ throw error
+ }
+ } else {
+ throw error
+ }
  }
 
- // Process pipelines
- if (pipelines && pipelines.length) {
- pipelines = await Promise.all(
- pipelines.map(async (p) => {
- let level
- const pipeDests = await Promise.all(
- p.map(async (t) => {
- // level assigned to pipeline is duplicated over all its targets, just store it
- level = t.level
- const fn = await loadTransportStreamBuilder(t.target)
- const stream = await fn(t.options)
- return stream
- }
- ))
-
- return {
- level,
- stream: createPipeline(pipeDests)
- }
- })
- )
- targetStreams.push(...pipelines)
- }
+ // Depending on how the default export is performed, and on how the code is
+ // transpiled, we may find cases of two nested "default" objects.
+ // See https://github.com/pinojs/pino/issues/1243#issuecomment-982774762
+ if (typeof worker === 'object') worker = worker.default
+ if (typeof worker === 'object') worker = worker.default
 
- // Skip building the multistream step if either one single pipeline or target is defined and
- // return directly the stream instance back to TreadStream.
- // This is equivalent to define either:
- //
- // pino.transport({ target: ... })
- //
- // OR
- //
- // pino.transport({ pipeline: ... })
- if (targetStreams.length === 1) {
- return targetStreams[0].stream
- } else {
- return build(process, {
- parse: 'lines',
- metadata: true,
- close (err, cb) {
- let expected = 0
- for (const transport of targetStreams) {
- expected++
- transport.stream.on('close', closeCb)
- transport.stream.end()
- }
-
- function closeCb () {
- if (--expected === 0) {
- cb(err)
- }
- }
- }
+ destination = await worker(workerData.workerData)
+
+ destination.on('error', function (err) {
+ Atomics.store(state, WRITE_INDEX, -2)
+ Atomics.notify(state, WRITE_INDEX)
+
+ Atomics.store(state, READ_INDEX, -2)
+ Atomics.notify(state, READ_INDEX)
+
+ parentPort.postMessage({
+ code: 'ERROR',
+ err
+ })
+ })
+
+ destination.on('close', function () {
+ // process._rawDebug('worker close emitted')
+ const end = Atomics.load(state, WRITE_INDEX)
+ Atomics.store(state, READ_INDEX, end)
+ Atomics.notify(state, READ_INDEX)
+ setImmediate(() => {
+ process.exit(0)
  })
+ })
+ }
+
+ // No .catch() handler,
+ // in case there is an error it goes
+ // to unhandledRejection
+ start().then(function () {
+ parentPort.postMessage({
+ code: 'READY'
+ })
+
+ process.nextTick(run)
+ })
+
+ function run () {
+ const current = Atomics.load(state, READ_INDEX)
+ const end = Atomics.load(state, WRITE_INDEX)
+
+ // process._rawDebug(`pre state ${current} ${end}`)
+
+ if (end === current) {
+ if (end === data.length) {
+ waitDiff(state, READ_INDEX, end, Infinity, run)
+ } else {
+ waitDiff(state, WRITE_INDEX, end, Infinity, run)
+ }
+ return
  }
 
- // TODO: Why split2 was not used for pipelines?
- function process (stream) {
- const multi = pino.multistream(targetStreams, { levels, dedupe })
- // TODO manage backpressure
- stream.on('data', function (chunk) {
- const { lastTime, lastMsg, lastObj, lastLevel } = this
- multi.lastLevel = lastLevel
- multi.lastTime = lastTime
- multi.lastMsg = lastMsg
- multi.lastObj = lastObj
-
- // TODO handle backpressure
- multi.write(chunk + '\n')
- })
+ // process._rawDebug(`post state ${current} ${end}`)
+
+ if (end === -1) {
+ // process._rawDebug('end')
+ destination.end()
+ return
  }
 
- /**
- * Creates a pipeline using the provided streams and return an instance of `PassThrough` stream
- * as a source for the pipeline.
- *
- * @param {(TransformStream|WritableStream)[]} streams An array of streams.
- * All intermediate streams in the array *MUST* be `Transform` streams and only the last one `Writable`.
- * @returns A `PassThrough` stream instance representing the source stream of the pipeline
- */
- function createPipeline (streams) {
- const ee = new EE()
- const stream = new PassThrough({
- autoDestroy: true,
- destroy (_, cb) {
- ee.on('error', cb)
- ee.on('closed', cb)
- }
- })
+ const toWrite = data.toString('utf8', current, end)
+ // process._rawDebug('worker writing: ' + toWrite)
 
- pipeline(stream, ...streams, function (err) {
- if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') {
- ee.emit('error', err)
- return
- }
+ const res = destination.write(toWrite)
 
- ee.emit('closed')
+ if (res) {
+ Atomics.store(state, READ_INDEX, end)
+ Atomics.notify(state, READ_INDEX)
+ setImmediate(run)
+ } else {
+ destination.once('drain', function () {
+ Atomics.store(state, READ_INDEX, end)
+ Atomics.notify(state, READ_INDEX)
+ run()
  })
-
- return stream
  }
  }
+
+ process.on('unhandledRejection', function (err) {
+ parentPort.postMessage({
+ code: 'ERROR',
+ err
+ })
+ process.exit(1)
+ })
+
+ process.on('uncaughtException', function (err) {
+ parentPort.postMessage({
+ code: 'ERROR',
+ err
+ })
+ process.exit(1)
+ })
+
+ process.once('exit', exitCode => {
+ if (exitCode !== 0) {
+ process.exit(exitCode)
+ return
+ }
+ if (destination?.writableNeedDrain && !destination?.writableEnded) {
+ parentPort.postMessage({
+ code: 'WARNING',
+ err: new Error('ThreadStream: process exited before destination stream was drained. this may indicate that the destination stream try to write to a another missing stream')
+ })
+ }
+
+ process.exit(0)
+ })
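Note on the hunk above (its file header is not shown in this view): the removed code matches pino's multi-target transport worker, while the added code matches thread-stream's worker.js, presumably a bundled-dependency update inside the action's published files. The new worker resolves the module named by filename (with ts-node, yarn PnP zip, and pkg fallbacks), awaits its default export with workerData.workerData to obtain destination, then pumps bytes out of the shared buffer: run() writes the slice between READ_INDEX and WRITE_INDEX to destination, advances READ_INDEX, and wakes the producer via Atomics.notify, sleeping through waitDiff when the buffer is empty. Any module loaded this way must default-export an async function resolving to a writable stream; a minimal sketch of a compatible target, following pino's documented transport shape (the file name and output here are illustrative, not part of this package):

'use strict'
// my-transport.js — hypothetical example, not part of this diff.
// The worker above runs: destination = await worker(workerData.workerData),
// so the default export must be an async function resolving to a stream it
// can write() to and end(); pino-abstract-transport builds such a stream
// and parses each newline-delimited log line into an object.
const build = require('pino-abstract-transport')

module.exports = async function (opts) {
  return build(async function (source) {
    // source yields one parsed log object per line written by the main thread
    for await (const obj of source) {
      process.stdout.write(JSON.stringify(obj) + '\n')
    }
  })
}

The worker then attaches the 'error' and 'close' handlers shown in the diff to whatever stream this function resolves to.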
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@rharkor/caching-for-turbo",
  "description": "Sets up Turborepo Remote Caching to work with GitHub Actions built-in cache",
- "version": "2.3.1",
+ "version": "2.3.3",
  "private": false,
  "homepage": "https://github.com/rharkor/caching-for-turbo",
  "repository": {
@@ -28,7 +28,7 @@
  "node": ">=20"
  },
  "license": "MIT",
- "packageManager": "npm@11.5.2",
+ "packageManager": "npm@11.6.4",
  "publishConfig": {
  "access": "public"
  }
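The package.json changes are routine release metadata: the published version moves from 2.3.1 to 2.3.3, and the packageManager pin is bumped from npm@11.5.2 to npm@11.6.4. That field matters only to contributors working in the repository with Corepack enabled for npm, since Corepack reads it to select the pinned package-manager version; consumers of the GitHub Action itself are unaffected.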