@kubb/agent 4.29.1 → 4.31.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.output/nitro.json +1 -1
- package/.output/server/chunks/nitro/nitro.mjs +542 -380
- package/.output/server/chunks/nitro/nitro.mjs.map +1 -1
- package/.output/server/chunks/routes/api/health.get.mjs +3 -1
- package/.output/server/chunks/routes/api/health.get.mjs.map +1 -1
- package/.output/server/index.mjs +3 -1
- package/.output/server/index.mjs.map +1 -1
- package/.output/server/node_modules/@kubb/fabric-core/dist/chunk-BVHe6Par.js +22 -0
- package/.output/server/node_modules/@kubb/fabric-core/dist/{defaultParser-C1atU7yU.js → defaultParser-BD_N68Bo.js} +3 -2
- package/.output/server/node_modules/@kubb/fabric-core/dist/{getRelativePath-BcieQL5M.js → getRelativePath-C4Au07ON.js} +2 -1
- package/.output/server/node_modules/@kubb/fabric-core/dist/index.js +8 -4
- package/.output/server/node_modules/@kubb/fabric-core/dist/{onProcessExit-CF200hsz.js → onProcessExit-CnZym153.js} +10 -3
- package/.output/server/node_modules/@kubb/fabric-core/dist/parsers/typescript.js +2 -1
- package/.output/server/node_modules/@kubb/fabric-core/dist/parsers.js +3 -2
- package/.output/server/node_modules/@kubb/fabric-core/dist/plugins.js +3 -2
- package/.output/server/node_modules/@kubb/fabric-core/package.json +3 -2
- package/.output/server/node_modules/@kubb/react-fabric/dist/{chunk-Cnw5r_-A.js → chunk-DbZtQ4qb.js} +5 -1
- package/.output/server/node_modules/@kubb/react-fabric/dist/index.js +4 -4
- package/.output/server/node_modules/@kubb/react-fabric/dist/{jsx-runtime-Dm7PKVbc.js → jsx-runtime-CZxrhUx8.js} +24 -5
- package/.output/server/node_modules/@kubb/react-fabric/dist/jsx-runtime.js +3 -3
- package/.output/server/node_modules/@kubb/react-fabric/dist/plugins.js +3 -3
- package/.output/server/node_modules/@kubb/react-fabric/dist/{react-D652OKii.js → react-C2cYsofv.js} +87 -26
- package/.output/server/node_modules/@kubb/react-fabric/dist/{reactPlugin-BkDkx1cs.js → reactPlugin-DGKBWjiR.js} +1454 -317
- package/.output/server/node_modules/@kubb/react-fabric/package.json +4 -3
- package/.output/server/node_modules/@logtail/core/dist/cjs/base.js +339 -0
- package/.output/server/node_modules/@logtail/core/dist/cjs/index.js +9 -0
- package/.output/server/node_modules/@logtail/core/package.json +45 -0
- package/.output/server/node_modules/@logtail/node/dist/cjs/context.js +93 -0
- package/.output/server/node_modules/@logtail/node/dist/cjs/index.js +6 -0
- package/.output/server/node_modules/@logtail/node/dist/cjs/node.js +106 -0
- package/.output/server/node_modules/@logtail/node/package.json +53 -0
- package/.output/server/node_modules/@logtail/tools/dist/cjs/batch.js +122 -0
- package/.output/server/node_modules/@logtail/tools/dist/cjs/burstProtection.js +50 -0
- package/.output/server/node_modules/@logtail/tools/dist/cjs/encode.js +12 -0
- package/.output/server/node_modules/@logtail/tools/dist/cjs/index.js +41 -0
- package/.output/server/node_modules/@logtail/tools/dist/cjs/queue.js +41 -0
- package/.output/server/node_modules/@logtail/tools/dist/cjs/throttle.js +67 -0
- package/.output/server/node_modules/@logtail/tools/package.json +39 -0
- package/.output/server/node_modules/@logtail/types/dist/cjs/types.js +21 -0
- package/.output/server/node_modules/@logtail/types/package.json +45 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/CachedKeyDecoder.js +63 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/DecodeError.js +18 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/Decoder.js +583 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/Encoder.js +408 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/ExtData.js +14 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/ExtensionCodec.js +72 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/decode.js +32 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/decodeAsync.js +44 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/encode.js +17 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/index.js +34 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/timestamp.js +104 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/utils/int.js +34 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/utils/prettyByte.js +8 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/utils/stream.js +40 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/utils/typedArrays.js +28 -0
- package/.output/server/node_modules/@msgpack/msgpack/dist/utils/utf8.js +167 -0
- package/.output/server/node_modules/@msgpack/msgpack/package.json +100 -0
- package/.output/server/node_modules/consola/dist/chunks/prompt.mjs +280 -0
- package/.output/server/node_modules/consola/dist/core.mjs +512 -0
- package/.output/server/node_modules/consola/dist/index.mjs +651 -0
- package/.output/server/node_modules/consola/dist/shared/consola.DRwqZj3T.mjs +72 -0
- package/.output/server/node_modules/consola/dist/shared/consola.DXBYu-KD.mjs +288 -0
- package/.output/server/node_modules/consola/package.json +136 -0
- package/.output/server/node_modules/serialize-error/index.js +154 -0
- package/.output/server/node_modules/serialize-error/package.json +41 -0
- package/.output/server/node_modules/stack-trace/lib/stack-trace.js +136 -0
- package/.output/server/node_modules/stack-trace/package.json +21 -0
- package/.output/server/package.json +11 -3
- package/README.md +3 -3
- package/package.json +23 -18
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.calculateJsonLogSizeBytes = void 0;
|
|
4
|
+
exports.default = makeBatch;
|
|
5
|
+
/*
|
|
6
|
+
* Default buffer size
|
|
7
|
+
*/
|
|
8
|
+
const DEFAULT_BUFFER_SIZE = 1000;
|
|
9
|
+
/*
|
|
10
|
+
* Default flush timeout
|
|
11
|
+
*/
|
|
12
|
+
const DEFAULT_FLUSH_TIMEOUT = 1000;
|
|
13
|
+
/*
|
|
14
|
+
* Default retry count
|
|
15
|
+
*/
|
|
16
|
+
const DEFAULT_RETRY_COUNT = 3;
|
|
17
|
+
/*
|
|
18
|
+
* Default retry backoff
|
|
19
|
+
*/
|
|
20
|
+
const DEFAULT_RETRY_BACKOFF = 100;
|
|
21
|
+
/*
|
|
22
|
+
* Default function for computing log size (serialized JSON length + 1 for comma)
|
|
23
|
+
*/
|
|
24
|
+
const calculateJsonLogSizeBytes = (log) => JSON.stringify(log).length + 1;
|
|
25
|
+
exports.calculateJsonLogSizeBytes = calculateJsonLogSizeBytes;
|
|
26
|
+
/**
 * Create a batcher that buffers logs and ships them together via the
 * function registered through `initPusher`.
 *
 * A flush is triggered when the buffer reaches `size` entries, when its
 * serialized size reaches `sizeBytes` (if non-zero), or after
 * `flushTimeout` ms — whichever comes first. Failed flushes are retried
 * up to `retryCount` times, waiting at least `retryBackoff` ms between
 * attempts.
 *
 * @param size - max number of logs held before flushing
 * @param flushTimeout - ms before a partial buffer is flushed
 * @param retryCount - number of retries after a failed flush
 * @param retryBackoff - minimum ms to wait before retrying a failed flush
 * @param sizeBytes - Size of the batch (in bytes) that triggers flushing. Set to 0 to disable.
 * @param calculateLogSizeBytes - Function to calculate size of a single ILogtailLog instance (in bytes).
 */
function makeBatch(size = DEFAULT_BUFFER_SIZE, flushTimeout = DEFAULT_FLUSH_TIMEOUT, retryCount = DEFAULT_RETRY_COUNT, retryBackoff = DEFAULT_RETRY_BACKOFF, sizeBytes = 0, calculateLogSizeBytes = exports.calculateJsonLogSizeBytes) {
    let timeout;
    let cb;
    let buffer = [];
    let bufferSizeBytes = 0;
    let retry = 0;
    // Wait until the minimum retry backoff time has passed before retrying
    let minRetryBackoff = 0;
    /**
     * Ship the buffered logs via `cb`, settling each pending push promise.
     * On failure, re-queues the logs and retries up to `retryCount` times,
     * then rejects every pending promise with the last error.
     */
    async function flush() {
        if (timeout) {
            clearTimeout(timeout);
        }
        timeout = null;
        const currentBuffer = buffer;
        // Renamed from `currentBufferSizeKB`: the value is in bytes, not KB.
        const currentBufferSizeBytes = bufferSizeBytes;
        buffer = [];
        bufferSizeBytes = 0;
        try {
            await cb(currentBuffer.map((d) => d.log));
            currentBuffer.forEach((d) => d.resolve(d.log));
            retry = 0;
        }
        catch (e) {
            if (retry < retryCount) {
                retry++;
                minRetryBackoff = Date.now() + retryBackoff;
                // Re-queue the failed logs *before* anything buffered since this
                // flush began, so the original FIFO order is preserved.
                buffer = currentBuffer.concat(buffer);
                bufferSizeBytes += currentBufferSizeBytes;
                await setupTimeout();
                return;
            }
            // Retries exhausted: propagate the error to every pending push.
            currentBuffer.forEach((d) => d.reject(e));
            retry = 0;
        }
    }
    /**
     * Arm the flush timer (no-op when one is already pending). The returned
     * promise settles once the timer has fired and its flush completed; it
     * stays pending if the timer is cleared by an earlier size-based flush.
     */
    function setupTimeout() {
        if (timeout) {
            return Promise.resolve();
        }
        return new Promise((resolve) => {
            timeout = setTimeout(async function () {
                await flush();
                resolve();
            }, flushTimeout);
        });
    }
    return {
        /**
         * Register the function that ships a batch of logs.
         * @param fn - async function receiving an array of logs
         * @returns an async function that enqueues one log and resolves with
         *          that log once it has been shipped (or rejects when all
         *          retries fail).
         */
        initPusher: function (fn) {
            cb = fn;
            return async function (log) {
                // Create the pending promise first so flush() can settle it via
                // entry.resolve / entry.reject. Avoids the async-Promise-executor
                // anti-pattern of the previous version, where a throw (e.g. from
                // calculateLogSizeBytes) became an unhandled rejection and the
                // returned promise never settled; now it rejects this call.
                let entry;
                const shipped = new Promise((resolve, reject) => {
                    entry = { log, resolve, reject };
                });
                buffer.push(entry);
                // We can skip log size calculation if there is no max size set
                if (sizeBytes > 0) {
                    bufferSizeBytes += calculateLogSizeBytes(log);
                }
                // If the buffer is full enough, flush it
                // Unless we're still waiting for the minimum retry backoff time
                const isBufferFullEnough = buffer.length >= size || (sizeBytes > 0 && bufferSizeBytes >= sizeBytes);
                if (isBufferFullEnough && Date.now() > minRetryBackoff) {
                    // Deliberately not awaited: `shipped` settles through
                    // entry.resolve/reject inside flush(), and flush() itself
                    // never rejects (all cb errors are handled in its catch).
                    void flush();
                }
                else {
                    // Fire-and-forget: the timer promise only settles when the
                    // timer fires, which never happens if a size-triggered
                    // flush clears the timer first.
                    void setupTimeout();
                }
                return shipped;
            };
        },
        flush,
    };
}
|
|
122
|
+
//# sourceMappingURL=batch.js.map
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.default = makeBurstProtection;
|
|
4
|
+
// Number of sub-intervals the sliding window is divided into for counting.
const RESOLUTION = 64;
/**
 * Create a burst protection which allows running function only a number of times in a configurable window
 * @param milliseconds - length of the checked window in milliseconds
 * @param max - maximum number of functions to run in that window
 * @param functionName - function name for error message
 */
function makeBurstProtection(milliseconds, max, functionName = "The function") {
    // A non-positive window or limit disables protection entirely.
    if (milliseconds <= 0 || max <= 0) {
        return (fn) => fn;
    }
    const intervalLength = milliseconds / RESOLUTION;
    let callCounts = [0];
    let lastErrorOutput = 0;
    let lastIntervalTime = Date.now();
    // Slide the counting window forward: prepend one zeroed bucket per elapsed
    // sub-interval and drop buckets that fell out of the window.
    const shiftWindow = () => {
        const now = Date.now();
        if (now < lastIntervalTime + intervalLength) {
            return;
        }
        const elapsedIntervals = Math.floor((now - lastIntervalTime) / intervalLength);
        const padding = Array(Math.min(elapsedIntervals, RESOLUTION)).fill(0);
        callCounts = padding.concat(callCounts).slice(0, RESOLUTION);
        lastIntervalTime += elapsedIntervals * intervalLength;
    };
    // Total number of calls recorded in the current window.
    const totalCalls = () => callCounts.reduce((sum, count) => sum + count, 0);
    return (fn) => {
        return async (...args) => {
            shiftWindow();
            if (totalCalls() < max) {
                callCounts[0]++;
                return await fn(...args);
            }
            // Over the limit: drop the call, warning at most once per window.
            const now = Date.now();
            if (lastErrorOutput < now - milliseconds) {
                lastErrorOutput = now;
                console.error(`${functionName} was called more than ${max} times during last ${milliseconds}ms. Ignoring.`);
            }
        };
    };
}
|
|
50
|
+
//# sourceMappingURL=burstProtection.js.map
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.base64Encode = base64Encode;
|
|
4
|
+
/**
 * Converts a plain-text (UTF-8) string to its base64 representation.
 *
 * @param str - Plain text string -> base64
 * @returns the base64-encoded form of `str`
 */
function base64Encode(str) {
    const raw = Buffer.from(str, "utf-8");
    return raw.toString("base64");
}
|
|
12
|
+
//# sourceMappingURL=encode.js.map
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || function (mod) {
|
|
19
|
+
if (mod && mod.__esModule) return mod;
|
|
20
|
+
var result = {};
|
|
21
|
+
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
22
|
+
__setModuleDefault(result, mod);
|
|
23
|
+
return result;
|
|
24
|
+
};
|
|
25
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
26
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
27
|
+
};
|
|
28
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
29
|
+
exports.calculateJsonLogSizeBytes = exports.makeThrottle = exports.makeBurstProtection = exports.makeBatch = exports.base64Encode = exports.Queue = void 0;
|
|
30
|
+
const queue_1 = __importDefault(require("./queue"));
|
|
31
|
+
exports.Queue = queue_1.default;
|
|
32
|
+
const encode_1 = require("./encode");
|
|
33
|
+
Object.defineProperty(exports, "base64Encode", { enumerable: true, get: function () { return encode_1.base64Encode; } });
|
|
34
|
+
const batch_1 = __importStar(require("./batch"));
|
|
35
|
+
exports.makeBatch = batch_1.default;
|
|
36
|
+
Object.defineProperty(exports, "calculateJsonLogSizeBytes", { enumerable: true, get: function () { return batch_1.calculateJsonLogSizeBytes; } });
|
|
37
|
+
const burstProtection_1 = __importDefault(require("./burstProtection"));
|
|
38
|
+
exports.makeBurstProtection = burstProtection_1.default;
|
|
39
|
+
const throttle_1 = __importDefault(require("./throttle"));
|
|
40
|
+
exports.makeThrottle = throttle_1.default;
|
|
41
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
/**
 * Queue, for FIFO access to arbitrary objects, backed by a singly-linked
 * list. Intended to be a faster replacement for a Javascript array.
 */
class Queue {
    constructor() {
        // Number of items currently in the queue. Head/tail nodes (`first`,
        // `last`) are left undefined until the first push.
        this.length = 0;
    }
    /**
     * Pushes a value onto the tail of the queue.
     * @param value - Any object to push into the queue
     */
    push(value) {
        const entry = { value };
        if (this.last) {
            this.last.next = entry;
        }
        else {
            this.first = entry;
        }
        this.last = entry;
        this.length++;
    }
    /**
     * Remove a value from the start of the queue (FIFO) and return it;
     * returns undefined when the queue is empty.
     */
    shift() {
        const head = this.first;
        if (!head) {
            return undefined;
        }
        this.first = head.next;
        this.length--;
        if (this.length === 0) {
            this.last = undefined;
        }
        return head.value;
    }
}
|
|
40
|
+
// Expose Queue as the module's default export.
exports.default = Queue;
|
|
41
|
+
//# sourceMappingURL=queue.js.map
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.default = makeThrottle;
|
|
7
|
+
const queue_1 = __importDefault(require("./queue"));
|
|
8
|
+
/**
 * Create a throttle which runs up to `max` async functions at once
 * @param max - maximum number of async functions to run
 * @param queueMax - maximum number of functions to queue (-1 = unlimited, default)
 */
function makeThrottle(max, queueMax = -1) {
    // Number of wrapped functions currently executing.
    let running = 0;
    // FIFO queue of calls waiting for a free execution slot.
    const pending = new queue_1.default();
    /**
     * Throttle function that throttles the passed func according to `max`
     * @param fn - async function to resolve
     */
    function throttle(fn) {
        return async (...args) => {
            return new Promise((resolve, reject) => {
                // One attempt to claim a slot; re-invoked later from the queue
                // when a slot frees up.
                async function attempt() {
                    if (running >= max) {
                        if (queueMax < 0 || pending.length < queueMax) {
                            // All slots busy but the queue has room: wait our turn.
                            pending.push(attempt);
                        }
                        else {
                            // Queue limit hit: fail this call immediately.
                            reject(new Error("Queue max limit exceeded"));
                        }
                        return;
                    }
                    running++;
                    try {
                        // Await here so any error thrown by `fn` is routed to
                        // the outer `reject` rather than escaping.
                        resolve(await fn(...args));
                    }
                    catch (err) {
                        reject(err);
                    }
                    // Slot is free again; hand it to the next waiting call.
                    running--;
                    if (pending.length > 0) {
                        pending.shift()();
                    }
                }
                attempt();
            });
        };
    }
    // Return the throttle function
    return throttle;
}
|
|
67
|
+
//# sourceMappingURL=throttle.js.map
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@logtail/tools",
|
|
3
|
+
"version": "0.5.6",
|
|
4
|
+
"description": "Better Stack JavaScript logging tools (formerly Logtail)",
|
|
5
|
+
"main": "dist/cjs/index.js",
|
|
6
|
+
"module": "dist/es6/index.js",
|
|
7
|
+
"types": "dist/es6/index.d.ts",
|
|
8
|
+
"sideEffects": false,
|
|
9
|
+
"files": [
|
|
10
|
+
"src",
|
|
11
|
+
"dist"
|
|
12
|
+
],
|
|
13
|
+
"publishConfig": {
|
|
14
|
+
"access": "public"
|
|
15
|
+
},
|
|
16
|
+
"scripts": {
|
|
17
|
+
"build:cjs": "tsc",
|
|
18
|
+
"build:es6": "tsc -p tsconfig.es6.json",
|
|
19
|
+
"build": "run-p build:*",
|
|
20
|
+
"prepublishOnly": "npm run build",
|
|
21
|
+
"test": "jest"
|
|
22
|
+
},
|
|
23
|
+
"author": "Better Stack <hello@betterstack.com>",
|
|
24
|
+
"repository": "https://github.com/logtail/logtail-js/tree/master/packages/tools",
|
|
25
|
+
"license": "ISC",
|
|
26
|
+
"bugs": {
|
|
27
|
+
"url": "https://github.com/logtail/logtail-js/issues"
|
|
28
|
+
},
|
|
29
|
+
"homepage": "https://github.com/logtail/logtail-js/tree/master/packages/tools#readme",
|
|
30
|
+
"devDependencies": {
|
|
31
|
+
"@types/nock": "^11.1.0",
|
|
32
|
+
"@types/source-map": "^0.5.7",
|
|
33
|
+
"nock": "^14.0.1"
|
|
34
|
+
},
|
|
35
|
+
"dependencies": {
|
|
36
|
+
"@logtail/types": "^0.5.6"
|
|
37
|
+
},
|
|
38
|
+
"gitHead": "6901c2b90af7cf9dda3e7c8be7c145d4d83e72a8"
|
|
39
|
+
}
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* Logtail library options
|
|
4
|
+
*/
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.LogLevel = void 0;
|
|
7
|
+
var LogLevel;
|
|
8
|
+
(function (LogLevel) {
|
|
9
|
+
// core log levels - available as functions
|
|
10
|
+
LogLevel["Error"] = "error";
|
|
11
|
+
LogLevel["Warn"] = "warn";
|
|
12
|
+
LogLevel["Info"] = "info";
|
|
13
|
+
LogLevel["Debug"] = "debug";
|
|
14
|
+
// extra log levels - recognized when passed from logging frameworks
|
|
15
|
+
LogLevel["Fatal"] = "fatal";
|
|
16
|
+
LogLevel["Http"] = "http";
|
|
17
|
+
LogLevel["Verbose"] = "verbose";
|
|
18
|
+
LogLevel["Silly"] = "silly";
|
|
19
|
+
LogLevel["Trace"] = "trace";
|
|
20
|
+
})(LogLevel || (exports.LogLevel = LogLevel = {}));
|
|
21
|
+
//# sourceMappingURL=types.js.map
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@logtail/types",
|
|
3
|
+
"version": "0.5.6",
|
|
4
|
+
"description": "Better Stack Typescript types (formerly Logtail)",
|
|
5
|
+
"keywords": [
|
|
6
|
+
"Better Stack",
|
|
7
|
+
"Logtail",
|
|
8
|
+
"logging",
|
|
9
|
+
"logger",
|
|
10
|
+
"typescript",
|
|
11
|
+
"types"
|
|
12
|
+
],
|
|
13
|
+
"author": "Better Stack <hello@betterstack.com>",
|
|
14
|
+
"homepage": "https://github.com/logtail/logtail-js/tree/master/packages/types#readme",
|
|
15
|
+
"license": "ISC",
|
|
16
|
+
"main": "dist/cjs/types.js",
|
|
17
|
+
"module": "dist/es6/types.js",
|
|
18
|
+
"types": "dist/es6/types.d.ts",
|
|
19
|
+
"sideEffects": false,
|
|
20
|
+
"files": [
|
|
21
|
+
"src",
|
|
22
|
+
"dist"
|
|
23
|
+
],
|
|
24
|
+
"repository": {
|
|
25
|
+
"type": "git",
|
|
26
|
+
"url": "git+https://github.com/logtail/logtail-js.git"
|
|
27
|
+
},
|
|
28
|
+
"publishConfig": {
|
|
29
|
+
"access": "public"
|
|
30
|
+
},
|
|
31
|
+
"scripts": {
|
|
32
|
+
"build:cjs": "tsc",
|
|
33
|
+
"build:es6": "tsc -p tsconfig.es6.json",
|
|
34
|
+
"build": "run-p build:*",
|
|
35
|
+
"prepublishOnly": "npm run build",
|
|
36
|
+
"test": "echo \"Error: run tests from root\" && exit 1"
|
|
37
|
+
},
|
|
38
|
+
"bugs": {
|
|
39
|
+
"url": "https://github.com/logtail/logtail-js/issues"
|
|
40
|
+
},
|
|
41
|
+
"devDependencies": {
|
|
42
|
+
"npm-run-all": "^4.1.5"
|
|
43
|
+
},
|
|
44
|
+
"gitHead": "6901c2b90af7cf9dda3e7c8be7c145d4d83e72a8"
|
|
45
|
+
}
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.CachedKeyDecoder = void 0;
|
|
4
|
+
const utf8_1 = require("./utils/utf8");
|
|
5
|
+
const DEFAULT_MAX_KEY_LENGTH = 16;
|
|
6
|
+
const DEFAULT_MAX_LENGTH_PER_KEY = 16;
|
|
7
|
+
/**
 * Caches short byte sequences together with their decoded strings so that
 * repeatedly-seen keys can skip redundant UTF-8 decoding.
 */
class CachedKeyDecoder {
    constructor(maxKeyLength = DEFAULT_MAX_KEY_LENGTH, maxLengthPerKey = DEFAULT_MAX_LENGTH_PER_KEY) {
        this.maxKeyLength = maxKeyLength;
        this.maxLengthPerKey = maxLengthPerKey;
        // Cache statistics.
        this.hit = 0;
        this.miss = 0;
        // One bucket per key length, built densely: `new Array(N)` would make
        // a sparse array, which is typically slower than a non-sparse array.
        this.caches = [];
        for (let i = 0; i < this.maxKeyLength; i++) {
            this.caches.push([]);
        }
    }
    /** Whether a key of `byteLength` bytes can be held in the cache. */
    canBeCached(byteLength) {
        return byteLength > 0 && byteLength <= this.maxKeyLength;
    }
    /** Look up a cached string for the given byte slice; null on miss. */
    find(bytes, inputOffset, byteLength) {
        const bucket = this.caches[byteLength - 1];
        for (const record of bucket) {
            let matches = true;
            for (let i = 0; i < byteLength; i++) {
                if (record.bytes[i] !== bytes[inputOffset + i]) {
                    matches = false;
                    break;
                }
            }
            if (matches) {
                return record.str;
            }
        }
        return null;
    }
    /** Insert a bytes→string pair, evicting a random entry when full. */
    store(bytes, value) {
        const bucket = this.caches[bytes.length - 1];
        const record = { bytes, str: value };
        if (bucket.length >= this.maxLengthPerKey) {
            // Bucket is full: overwrite an arbitrary position instead of growing.
            bucket[(Math.random() * bucket.length) | 0] = record;
        }
        else {
            bucket.push(record);
        }
    }
    /** Decode a byte slice to a string, consulting/updating the cache. */
    decode(bytes, inputOffset, byteLength) {
        const cached = this.find(bytes, inputOffset, byteLength);
        if (cached != null) {
            this.hit++;
            return cached;
        }
        this.miss++;
        const str = (0, utf8_1.utf8DecodeJs)(bytes, inputOffset, byteLength);
        // Copy the slice before caching: `bytes` may be a NodeJS Buffer, whose
        // .slice() returns a view over its shared internal ArrayBuffer.
        const slicedCopyOfBytes = Uint8Array.prototype.slice.call(bytes, inputOffset, inputOffset + byteLength);
        this.store(slicedCopyOfBytes, str);
        return str;
    }
}
|
|
62
|
+
// Expose the decoder as a named CommonJS export.
exports.CachedKeyDecoder = CachedKeyDecoder;
|
|
63
|
+
//# sourceMappingURL=CachedKeyDecoder.js.map
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.DecodeError = void 0;
|
|
4
|
+
/**
 * Error subclass raised when a byte stream cannot be decoded.
 */
class DecodeError extends Error {
    constructor(message) {
        super(message);
        // Re-point the instance's prototype through a fresh intermediate
        // object — a cross-platform fix for environments where subclassing
        // built-ins leaves `instanceof DecodeError` broken.
        const fixedProto = Object.create(DecodeError.prototype);
        Object.setPrototypeOf(this, fixedProto);
        // Define `name` as non-enumerable so it stays out of key listings.
        Object.defineProperty(this, "name", {
            value: DecodeError.name,
            enumerable: false,
            configurable: true,
        });
    }
}
|
|
17
|
+
// Expose the error class as a named CommonJS export.
exports.DecodeError = DecodeError;
|
|
18
|
+
//# sourceMappingURL=DecodeError.js.map
|