@nocobase/plugin-logger 0.18.0-alpha.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +661 -0
- package/README.md +1 -0
- package/client.d.ts +2 -0
- package/client.js +1 -0
- package/dist/client/LogsDownloader.d.ts +2 -0
- package/dist/client/index.d.ts +7 -0
- package/dist/client/index.js +1 -0
- package/dist/client/locale/index.d.ts +3 -0
- package/dist/externalVersion.js +11 -0
- package/dist/index.d.ts +2 -0
- package/dist/index.js +39 -0
- package/dist/locale/en-US.d.ts +10 -0
- package/dist/locale/en-US.js +31 -0
- package/dist/locale/zh-CN.d.ts +10 -0
- package/dist/locale/zh-CN.js +31 -0
- package/dist/node_modules/tar-fs/LICENSE +21 -0
- package/dist/node_modules/tar-fs/index.js +1 -0
- package/dist/node_modules/tar-fs/node_modules/tar-stream/constants.js +14 -0
- package/dist/node_modules/tar-fs/node_modules/tar-stream/extract.js +406 -0
- package/dist/node_modules/tar-fs/node_modules/tar-stream/headers.js +321 -0
- package/dist/node_modules/tar-fs/node_modules/tar-stream/index.js +2 -0
- package/dist/node_modules/tar-fs/node_modules/tar-stream/pack.js +287 -0
- package/dist/node_modules/tar-fs/node_modules/tar-stream/package.json +35 -0
- package/dist/node_modules/tar-fs/package.json +1 -0
- package/dist/server/index.d.ts +1 -0
- package/dist/server/index.js +33 -0
- package/dist/server/plugin.d.ts +11 -0
- package/dist/server/plugin.js +61 -0
- package/dist/server/resourcer/logger.d.ts +9 -0
- package/dist/server/resourcer/logger.js +114 -0
- package/package.json +21 -0
- package/server.d.ts +2 -0
- package/server.js +1 -0
|
@@ -0,0 +1,287 @@
|
|
|
1
|
+
// Vendored tar-stream pack.js: generates a tar archive as a Readable stream.
const { Readable, Writable, getStreamError } = require('streamx')
const b4a = require('b4a')

const constants = require('./constants')
const headers = require('./headers')

// Default permission bits applied when an entry header omits `mode`.
const DMODE = 0o755 // directories
const FMODE = 0o644 // files

// Two 512-byte zero blocks: the tar end-of-archive marker, also sliced for
// per-entry padding (see overflow()).
const END_OF_TAR = b4a.alloc(1024)
// A Writable sink representing one entry in the tar archive. Entries are
// serialized strictly one at a time: the first sink becomes the pack's active
// `_stream`, later ones queue in `_pending` until Pack._done promotes them.
class Sink extends Writable {
  constructor (pack, header, callback) {
    super({ mapWritable, eagerOpen: true })

    this.written = 0 // bytes of body received so far
    this.header = header

    this._callback = callback // user callback, fired once via _continuePack
    this._linkname = null
    // Symlink entries without an explicit linkname take the link target from
    // the entry body instead of the header.
    this._isLinkname = header.type === 'symlink' && !header.linkname
    // Only regular/contiguous files carry a body; every other type is void.
    this._isVoid = header.type !== 'file' && header.type !== 'contiguous-file'
    this._finished = false
    this._pack = pack
    this._openCallback = null

    // Become the active entry immediately, or wait in line.
    if (this._pack._stream === null) this._pack._stream = this
    else this._pack._pending.push(this)
  }

  _open (cb) {
    // Defer opening until this sink is the pack's active stream.
    this._openCallback = cb
    if (this._pack._stream === this) this._continueOpen()
  }

  // Invoke the user's entry callback exactly once.
  _continuePack (err) {
    if (this._callback === null) return

    const callback = this._callback
    this._callback = null

    callback(err)
  }

  // Runs when this sink becomes the active entry: emits the header and, for
  // bodiless (void) entries, finishes immediately.
  _continueOpen () {
    if (this._pack._stream === null) this._pack._stream = this

    const cb = this._openCallback
    this._openCallback = null
    if (cb === null) return

    if (this._pack.destroying) return cb(new Error('pack stream destroyed'))
    if (this._pack._finalized) return cb(new Error('pack stream is already finalized'))

    this._pack._stream = this

    // Body-derived symlink targets are only known at _finish time, so the
    // header is emitted there instead (see _finish).
    if (!this._isLinkname) {
      this._pack._encode(this.header)
    }

    if (this._isVoid) {
      this._finish()
      this._continuePack(null)
    }

    cb(null)
  }

  _write (data, cb) {
    // Accumulate the symlink target instead of emitting body bytes.
    if (this._isLinkname) {
      this._linkname = this._linkname ? b4a.concat([this._linkname, data]) : data
      return cb(null)
    }

    if (this._isVoid) {
      if (data.byteLength > 0) {
        return cb(new Error('No body allowed for this entry'))
      }
      return cb()
    }

    this.written += data.byteLength
    // Forward to the pack's readable side; when the pack applies
    // backpressure, park the callback in _drain (released by Pack._doDrain).
    if (this._pack.push(data)) return cb()
    this._pack._drain = cb
  }

  _finish () {
    if (this._finished) return
    this._finished = true

    // Late header emission for body-derived symlink targets.
    if (this._isLinkname) {
      this.header.linkname = this._linkname ? b4a.toString(this._linkname, 'utf-8') : ''
      this._pack._encode(this.header)
    }

    // Zero-pad the body up to the next 512-byte tar block boundary.
    overflow(this._pack, this.header.size)

    this._pack._done(this)
  }

  _final (cb) {
    if (this.written !== this.header.size) { // corrupting tar
      return cb(new Error('Size mismatch'))
    }

    this._finish()
    cb(null)
  }

  _getError () {
    return getStreamError(this) || new Error('tar entry destroyed')
  }

  _predestroy () {
    // Destroying an entry invalidates the whole archive.
    this._pack.destroy(this._getError())
  }

  _destroy (cb) {
    this._pack._done(this)

    // Report failure to the user callback unless the entry already finished.
    this._continuePack(this._finished ? null : this._getError())

    cb()
  }
}
|
|
126
|
+
|
|
127
|
+
// Readable stream that produces a tar archive from entries added via entry().
class Pack extends Readable {
  constructor (opts) {
    super(opts)
    this._drain = noop // parked Sink write callback awaiting downstream drain
    this._finalized = false // END_OF_TAR emitted; no more entries allowed
    this._finalizing = false // finalize() requested while entries in flight
    this._pending = [] // Sinks queued behind the active one
    this._stream = null // the Sink currently writing its body
  }

  /**
   * Add an entry to the archive. With a buffer/string body the entry is
   * written and closed immediately; otherwise write the body to the returned
   * sink yourself.
   * @param {object} header - tar header; missing fields get defaults below.
   * @param {(Buffer|string|Function)} [buffer] - body, or the callback.
   * @param {Function} [callback] - invoked once the entry has been processed.
   * @returns {Sink} writable stream for the entry body.
   */
  entry (header, buffer, callback) {
    if (this._finalized || this.destroying) throw new Error('already finalized or destroyed')

    if (typeof buffer === 'function') {
      callback = buffer
      buffer = null
    }

    if (!callback) callback = noop

    // Fill in defaults; symlinks never carry a body (target goes in linkname).
    if (!header.size || header.type === 'symlink') header.size = 0
    if (!header.type) header.type = modeToType(header.mode)
    if (!header.mode) header.mode = header.type === 'directory' ? DMODE : FMODE
    if (!header.uid) header.uid = 0
    if (!header.gid) header.gid = 0
    if (!header.mtime) header.mtime = new Date()

    if (typeof buffer === 'string') buffer = b4a.from(buffer)

    const sink = new Sink(this, header, callback)

    if (b4a.isBuffer(buffer)) {
      header.size = buffer.byteLength
      sink.write(buffer)
      sink.end()
      return sink
    }

    // NOTE(review): the branch below is redundant — both paths return sink.
    if (sink._isVoid) {
      return sink
    }

    return sink
  }

  // Emit the archive trailer once all queued entries have completed.
  finalize () {
    if (this._stream || this._pending.length > 0) {
      // Entries still in flight: _done() re-invokes finalize() later.
      this._finalizing = true
      return
    }

    if (this._finalized) return
    this._finalized = true

    this.push(END_OF_TAR)
    this.push(null)
  }

  // Called by a Sink when it completes or is destroyed; promotes the next
  // queued entry, or finishes a deferred finalize().
  _done (stream) {
    if (stream !== this._stream) return

    this._stream = null

    if (this._finalizing) this.finalize()
    if (this._pending.length) this._pending.shift()._continueOpen()
  }

  // Push an encoded header block; falls back to PAX when the plain encoding
  // cannot represent the header (headers.encode returns a falsy value).
  _encode (header) {
    if (!header.pax) {
      const buf = headers.encode(header)
      if (buf) {
        this.push(buf)
        return
      }
    }
    this._encodePax(header)
  }

  // Emit a PAX extended-header record followed by the real entry header.
  _encodePax (header) {
    const paxHeader = headers.encodePax({
      name: header.name,
      linkname: header.linkname,
      pax: header.pax
    })

    const newHeader = {
      name: 'PaxHeader',
      mode: header.mode,
      uid: header.uid,
      gid: header.gid,
      size: paxHeader.byteLength,
      mtime: header.mtime,
      type: 'pax-header',
      linkname: header.linkname && 'PaxHeader',
      uname: header.uname,
      gname: header.gname,
      devmajor: header.devmajor,
      devminor: header.devminor
    }

    this.push(headers.encode(newHeader))
    this.push(paxHeader)
    overflow(this, paxHeader.byteLength)

    // Reuse the PAX header object for the actual entry header.
    newHeader.size = header.size
    newHeader.type = header.type
    this.push(headers.encode(newHeader))
  }

  // Release a Sink write callback parked because of backpressure.
  _doDrain () {
    const drain = this._drain
    this._drain = noop
    drain()
  }

  _predestroy () {
    const err = getStreamError(this)

    // Propagate destruction to the active and all queued entries.
    if (this._stream) this._stream.destroy(err)

    while (this._pending.length) {
      const stream = this._pending.shift()
      stream.destroy(err)
      stream._continueOpen()
    }

    this._doDrain()
  }

  _read (cb) {
    // Downstream wants data again: release any parked entry write.
    this._doDrain()
    cb()
  }
}
|
|
261
|
+
|
|
262
|
+
/**
 * Factory for a tar pack stream.
 * @param {object} [opts] - streamx Readable options forwarded to Pack.
 * @returns {Pack} readable stream emitting the tar archive bytes.
 */
function pack (opts) {
  return new Pack(opts)
}

module.exports = pack
|
|
265
|
+
|
|
266
|
+
/**
 * Map the file-type bits of a stat mode to a tar entry type name.
 * Regular files and any unrecognized type map to 'file'.
 * @param {number} mode - stat mode bits.
 * @returns {string} tar entry type.
 */
function modeToType (mode) {
  const fmt = mode & constants.S_IFMT
  if (fmt === constants.S_IFBLK) return 'block-device'
  if (fmt === constants.S_IFCHR) return 'character-device'
  if (fmt === constants.S_IFDIR) return 'directory'
  if (fmt === constants.S_IFIFO) return 'fifo'
  if (fmt === constants.S_IFLNK) return 'symlink'
  return 'file'
}
|
|
277
|
+
|
|
278
|
+
// Shared no-op used as the default drain/entry callback.
function noop () {}
|
|
279
|
+
|
|
280
|
+
/**
 * Pad the archive out to the next 512-byte tar block boundary.
 * @param {Pack} self - stream to push the zero padding into.
 * @param {number} size - byte length of the data just written.
 */
function overflow (self, size) {
  const remainder = size & 511
  if (remainder !== 0) self.push(END_OF_TAR.subarray(0, 512 - remainder))
}
|
|
284
|
+
|
|
285
|
+
// streamx mapWritable hook: normalize incoming chunks (e.g. strings) to
// buffers so the Sink always receives binary data.
function mapWritable (buf) {
  if (b4a.isBuffer(buf)) return buf
  return b4a.from(buf)
}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "tar-stream",
|
|
3
|
+
"version": "3.1.6",
|
|
4
|
+
"description": "tar-stream is a streaming tar parser and generator and nothing else. It operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.",
|
|
5
|
+
"main": "index.js",
|
|
6
|
+
"files": [
|
|
7
|
+
"*.js"
|
|
8
|
+
],
|
|
9
|
+
"browser": {
|
|
10
|
+
"fs": false
|
|
11
|
+
},
|
|
12
|
+
"scripts": {
|
|
13
|
+
"test": "standard && brittle test/*.js"
|
|
14
|
+
},
|
|
15
|
+
"repository": {
|
|
16
|
+
"type": "git",
|
|
17
|
+
"url": "git+https://github.com/mafintosh/tar-stream.git"
|
|
18
|
+
},
|
|
19
|
+
"author": "Mathias Buus <mathiasbuus@gmail.com>",
|
|
20
|
+
"license": "MIT",
|
|
21
|
+
"bugs": {
|
|
22
|
+
"url": "https://github.com/mafintosh/tar-stream/issues"
|
|
23
|
+
},
|
|
24
|
+
"homepage": "https://github.com/mafintosh/tar-stream",
|
|
25
|
+
"dependencies": {
|
|
26
|
+
"b4a": "^1.6.4",
|
|
27
|
+
"fast-fifo": "^1.2.0",
|
|
28
|
+
"streamx": "^2.15.0"
|
|
29
|
+
},
|
|
30
|
+
"devDependencies": {
|
|
31
|
+
"brittle": "^3.3.2",
|
|
32
|
+
"concat-stream": "^2.0.0",
|
|
33
|
+
"standard": "^17.0.1"
|
|
34
|
+
}
|
|
35
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"name":"tar-fs","version":"3.0.4","description":"filesystem bindings for tar-stream","dependencies":{"mkdirp-classic":"^0.5.2","pump":"^3.0.0","tar-stream":"^3.1.5"},"files":["index.js"],"standard":{"ignore":["test/fixtures/**"]},"keywords":["tar","fs","file","tarball","directory","stream"],"devDependencies":{"brittle":"^3.1.3","rimraf":"^2.6.3","standard":"^17.0.1"},"scripts":{"test":"standard && brittle test/index.js"},"bugs":{"url":"https://github.com/mafintosh/tar-fs/issues"},"homepage":"https://github.com/mafintosh/tar-fs","main":"index.js","directories":{"test":"test"},"author":"Mathias Buus","license":"MIT","repository":{"type":"git","url":"https://github.com/mafintosh/tar-fs.git"},"_lastModified":"2023-12-29T07:00:39.527Z"}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Re-export the server plugin class as this package entry's default export.
export { default } from './plugin';
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
// ---- esbuild-generated CommonJS interop helpers ----
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define lazy getters on `target` for every key in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as getters (skipping `except`).
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a CommonJS module so it can be consumed like an ES module.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

// Module body: forward ./plugin's default export as this module's default.
var server_exports = {};
__export(server_exports, {
  default: () => import_plugin.default
});
module.exports = __toCommonJS(server_exports);
var import_plugin = __toESM(require("./plugin"));
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
import { InstallOptions, Plugin } from '@nocobase/server';
/**
 * Server-side plugin that registers the `logger` resource (listing and
 * downloading log files). Per the compiled implementation, all lifecycle
 * hooks other than `load` are currently no-ops.
 */
export declare class PluginLoggerServer extends Plugin {
    afterAdd(): void;
    beforeLoad(): void;
    /** Registers the `logger` resource and its ACL snippet. */
    load(): Promise<void>;
    install(options?: InstallOptions): Promise<void>;
    afterEnable(): Promise<void>;
    afterDisable(): Promise<void>;
    remove(): Promise<void>;
}
export default PluginLoggerServer;
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
// ---- esbuild-generated CommonJS interop helpers ----
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define lazy getters on `target` for every key in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as getters (skipping `except`).
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a CommonJS module so it can be consumed like an ES module.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
28
|
+
// Module exports: the named `PluginLoggerServer` class plus a default export.
var plugin_exports = {};
__export(plugin_exports, {
  PluginLoggerServer: () => PluginLoggerServer,
  default: () => plugin_default
});
module.exports = __toCommonJS(plugin_exports);
var import_server = require("@nocobase/server");
var import_logger = __toESM(require("./resourcer/logger"));
// Logger plugin: on load, registers the `logger` resource and an ACL snippet
// granting access to all of its actions. Every other lifecycle hook is
// intentionally a no-op.
class PluginLoggerServer extends import_server.Plugin {
  afterAdd() {
  }
  beforeLoad() {
  }
  async load() {
    // Register the logger resource (list/download actions).
    this.app.resource(import_logger.default);
    // Snippet lets plugin managers invoke every `logger:*` action.
    this.app.acl.registerSnippet({
      name: `pm.${this.name}.logger`,
      actions: ["logger:*"]
    });
  }
  async install(options) {
  }
  async afterEnable() {
  }
  async afterDisable() {
  }
  async remove() {
  }
}
var plugin_default = PluginLoggerServer;
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  PluginLoggerServer
});
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
// ---- esbuild-generated CommonJS interop helpers ----
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define lazy getters on `target` for every key in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as getters (skipping `except`).
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a CommonJS module so it can be consumed like an ES module.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
28
|
+
// Module exports: a single default export — the `logger` resource definition.
var logger_exports = {};
__export(logger_exports, {
  default: () => logger_default
});
module.exports = __toCommonJS(logger_exports);
var import_logger = require("@nocobase/logger");
var import_promises = require("fs/promises");
var import_path = require("path");
var import_stream = __toESM(require("stream"));
var import_tar_fs = require("tar-fs");
var import_zlib = __toESM(require("zlib"));
|
|
39
|
+
// Build a gzipped tar archive containing the given log files.
// `files` are paths relative to the logger directory (tar-fs `entries`).
// Resolves with the gzip stream once the tar stream has been fully read;
// rejects if either the tar or gzip stream errors before then.
const tarFiles = (files) => {
  return new Promise((resolve, reject) => {
    const gz = import_zlib.default.createGzip();
    const tar = (0, import_tar_fs.pack)((0, import_logger.getLoggerFilePath)(), {
      entries: files
    });
    tar.on("error", (err) => reject(err));
    gz.on("error", (err) => reject(err));
    // pipe() respects backpressure; the previous manual 'data' -> write()
    // forwarding through an intermediate PassThrough ignored write() return
    // values, buffering the whole archive regardless of consumer speed.
    tar.pipe(gz);
    // 'end' fires once pipe() has consumed the tar stream — same point at
    // which the old PassThrough-based code resolved.
    tar.on("end", () => resolve(gz));
  });
};
|
|
59
|
+
// Resource definition exposing log-file browsing and download over HTTP.
var logger_default = {
  name: "logger",
  actions: {
    // logger:list — return a tree of *.log files under the log directory.
    list: async (ctx, next) => {
      const path = (0, import_logger.getLoggerFilePath)();
      // Recursively collect .log files; directories become { name, files }
      // nodes and empty directories are omitted from the tree.
      const readDir = async (path2) => {
        const fileTree = [];
        try {
          const files2 = await (0, import_promises.readdir)(path2, { withFileTypes: true });
          for (const file of files2) {
            if (file.isDirectory()) {
              const subFiles = await readDir((0, import_path.join)(path2, file.name));
              if (!subFiles.length) {
                continue;
              }
              fileTree.push({
                name: file.name,
                files: subFiles
              });
            } else if (file.name.endsWith(".log")) {
              fileTree.push(file.name);
            }
          }
          return fileTree;
        } catch (err) {
          // Best-effort: log and report an empty listing for unreadable dirs.
          ctx.log.error("readDir error", { err, path: path2 });
          return [];
        }
      };
      const files = await readDir(path);
      ctx.body = files;
      await next();
    },
    // logger:download — stream the requested log files as logs.tar.gz.
    download: async (ctx, next) => {
      let { files = [] } = ctx.action.params.values || {};
      // Reject non-log files and report the offending entry. The previous
      // code concatenated the boolean result of .some(), producing the
      // message "Invalid file type: true".
      const invalidFile = files.find((file) => !file.endsWith(".log"));
      if (invalidFile !== undefined) {
        ctx.throw(400, ctx.t("Invalid file type: ") + invalidFile);
      }
      files = files.map((file) => {
        if (file.startsWith("/")) {
          return file.slice(1);
        }
        return file;
      });
      // Path-traversal guard: entries are resolved relative to the log
      // directory by tar-fs, so ".." segments in untrusted input must not
      // be allowed to escape it.
      const traversal = files.find((file) => file.split(/[\\/]/).includes(".."));
      if (traversal !== undefined) {
        ctx.throw(400, ctx.t("Invalid file type: ") + traversal);
      }
      try {
        ctx.attachment("logs.tar.gz");
        ctx.body = await tarFiles(files);
      } catch (err) {
        ctx.log.error(`download error: ${err.message}`, { files, err: err.stack });
        ctx.throw(500, ctx.t("Download logs failed."));
      }
      await next();
    }
  }
};
|
package/package.json
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@nocobase/plugin-logger",
|
|
3
|
+
"displayName": "Logger",
|
|
4
|
+
"displayName.zh-CN": "日志",
|
|
5
|
+
"description": "Package and download log files",
|
|
6
|
+
"description.zh-CN": "打包下载日志文件",
|
|
7
|
+
"version": "0.18.0-alpha.7",
|
|
8
|
+
"license": "AGPL-3.0",
|
|
9
|
+
"main": "dist/server/index.js",
|
|
10
|
+
"devDependencies": {
|
|
11
|
+
"@types/tar-fs": "^2.0.2",
|
|
12
|
+
"tar-fs": "^3.0.4"
|
|
13
|
+
},
|
|
14
|
+
"peerDependencies": {
|
|
15
|
+
"@nocobase/actions": "0.x",
|
|
16
|
+
"@nocobase/client": "0.x",
|
|
17
|
+
"@nocobase/server": "0.x",
|
|
18
|
+
"@nocobase/test": "0.x"
|
|
19
|
+
},
|
|
20
|
+
"gitHead": "979a9c59a98c61a2287dd847580746a9b597cbde"
|
|
21
|
+
}
|
package/server.d.ts
ADDED
package/server.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Package entry shim: delegate to the compiled server bundle.
module.exports = require('./dist/server/index.js');
|