opticedge-cloud-utils 1.1.13 → 1.1.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk.d.ts +1 -0
- package/dist/chunk.js +37 -0
- package/dist/env.js +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +1 -0
- package/package.json +1 -1
- package/src/chunk.ts +33 -0
- package/src/env.ts +2 -0
- package/src/index.ts +1 -0
- package/tests/chunk.test.ts +48 -0
package/dist/chunk.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare function chunkByBytes(arr: any[], maxBytes?: number): any[][];
|
package/dist/chunk.js
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
3
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
4
|
+
exports.chunkByBytes = chunkByBytes;
|
|
5
|
+
function chunkByBytes(arr, maxBytes = 700000) {
|
|
6
|
+
// 700KB safe default
|
|
7
|
+
const chunks = [];
|
|
8
|
+
let current = [];
|
|
9
|
+
let curBytes = 0;
|
|
10
|
+
for (const item of arr) {
|
|
11
|
+
const s = JSON.stringify(item);
|
|
12
|
+
const b = Buffer.byteLength(s, 'utf8');
|
|
13
|
+
// if a single item exceeds maxBytes, push it alone (or skip)
|
|
14
|
+
if (b > maxBytes) {
|
|
15
|
+
// optionally log and skip - here we'll push as its own chunk
|
|
16
|
+
if (current.length) {
|
|
17
|
+
chunks.push(current);
|
|
18
|
+
current = [];
|
|
19
|
+
curBytes = 0;
|
|
20
|
+
}
|
|
21
|
+
chunks.push([item]);
|
|
22
|
+
continue;
|
|
23
|
+
}
|
|
24
|
+
if (curBytes + b > maxBytes) {
|
|
25
|
+
chunks.push(current);
|
|
26
|
+
current = [item];
|
|
27
|
+
curBytes = b;
|
|
28
|
+
}
|
|
29
|
+
else {
|
|
30
|
+
current.push(item);
|
|
31
|
+
curBytes += b;
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
if (current.length)
|
|
35
|
+
chunks.push(current);
|
|
36
|
+
return chunks;
|
|
37
|
+
}
|
package/dist/env.js
CHANGED
package/dist/index.d.ts
CHANGED
package/dist/index.js
CHANGED
|
@@ -20,6 +20,7 @@ __exportStar(require("./db/mongo3"), exports);
|
|
|
20
20
|
__exportStar(require("./tw/utils"), exports);
|
|
21
21
|
__exportStar(require("./tw/wallet"), exports);
|
|
22
22
|
__exportStar(require("./auth"), exports);
|
|
23
|
+
__exportStar(require("./chunk"), exports);
|
|
23
24
|
__exportStar(require("./env"), exports);
|
|
24
25
|
__exportStar(require("./parser"), exports);
|
|
25
26
|
__exportStar(require("./regex"), exports);
|
package/package.json
CHANGED
package/src/chunk.ts
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
/* eslint-disable @typescript-eslint/no-explicit-any */
|
|
2
|
+
|
|
3
|
+
export function chunkByBytes(arr: any[], maxBytes = 700_000) {
|
|
4
|
+
// 700KB safe default
|
|
5
|
+
const chunks: any[][] = []
|
|
6
|
+
let current: any[] = []
|
|
7
|
+
let curBytes = 0
|
|
8
|
+
for (const item of arr) {
|
|
9
|
+
const s = JSON.stringify(item)
|
|
10
|
+
const b = Buffer.byteLength(s, 'utf8')
|
|
11
|
+
// if a single item exceeds maxBytes, push it alone (or skip)
|
|
12
|
+
if (b > maxBytes) {
|
|
13
|
+
// optionally log and skip - here we'll push as its own chunk
|
|
14
|
+
if (current.length) {
|
|
15
|
+
chunks.push(current)
|
|
16
|
+
current = []
|
|
17
|
+
curBytes = 0
|
|
18
|
+
}
|
|
19
|
+
chunks.push([item])
|
|
20
|
+
continue
|
|
21
|
+
}
|
|
22
|
+
if (curBytes + b > maxBytes) {
|
|
23
|
+
chunks.push(current)
|
|
24
|
+
current = [item]
|
|
25
|
+
curBytes = b
|
|
26
|
+
} else {
|
|
27
|
+
current.push(item)
|
|
28
|
+
curBytes += b
|
|
29
|
+
}
|
|
30
|
+
}
|
|
31
|
+
if (current.length) chunks.push(current)
|
|
32
|
+
return chunks
|
|
33
|
+
}
|
package/src/env.ts
CHANGED
package/src/index.ts
CHANGED
package/tests/chunk.test.ts
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
import { chunkByBytes } from '../src/chunk'
|
|
2
|
+
|
|
3
|
+
describe('chunkByBytes', () => {
|
|
4
|
+
test('splits array into chunks based on byte size', () => {
|
|
5
|
+
const arr = [
|
|
6
|
+
{ id: 'a', val: 'x'.repeat(100) },
|
|
7
|
+
{ id: 'b', val: 'y'.repeat(100) },
|
|
8
|
+
{ id: 'c', val: 'z'.repeat(100) }
|
|
9
|
+
]
|
|
10
|
+
const chunks = chunkByBytes(arr, 200) // 200 bytes max per chunk
|
|
11
|
+
expect(chunks.length).toBeGreaterThan(1) // should split
|
|
12
|
+
expect(chunks.flat()).toEqual(arr) // all items present
|
|
13
|
+
})
|
|
14
|
+
|
|
15
|
+
test('puts single large item in its own chunk', () => {
|
|
16
|
+
const arr = [{ id: 'big', val: 'x'.repeat(1000) }]
|
|
17
|
+
const chunks = chunkByBytes(arr, 100)
|
|
18
|
+
expect(chunks).toEqual([arr]) // large item pushed alone
|
|
19
|
+
})
|
|
20
|
+
|
|
21
|
+
test('does not split if all items fit within maxBytes', () => {
|
|
22
|
+
const arr = [
|
|
23
|
+
{ id: 'a', val: 'x' },
|
|
24
|
+
{ id: 'b', val: 'y' }
|
|
25
|
+
]
|
|
26
|
+
const chunks = chunkByBytes(arr, 1000)
|
|
27
|
+
expect(chunks).toEqual([arr])
|
|
28
|
+
})
|
|
29
|
+
|
|
30
|
+
test('uses default maxBytes if not provided', () => {
|
|
31
|
+
const arr = [{ id: 'a', val: 'x'.repeat(10) }]
|
|
32
|
+
// call without maxBytes
|
|
33
|
+
const chunks = chunkByBytes(arr)
|
|
34
|
+
expect(chunks.flat()).toEqual(arr) // all items present
|
|
35
|
+
})
|
|
36
|
+
|
|
37
|
+
test('flushes current chunk when a single item exceeds maxBytes', () => {
|
|
38
|
+
const arr = [
|
|
39
|
+
{ id: '1', val: 'x'.repeat(50) }, // first small item
|
|
40
|
+
{ id: '2', val: 'y'.repeat(200) } // oversized item
|
|
41
|
+
]
|
|
42
|
+
// set maxBytes small enough to trigger oversized item
|
|
43
|
+
const chunks = chunkByBytes(arr, 100)
|
|
44
|
+
expect(chunks.length).toBe(2) // first small chunk + oversized item alone
|
|
45
|
+
expect(chunks[0]).toEqual([arr[0]]) // first item in first chunk
|
|
46
|
+
expect(chunks[1]).toEqual([arr[1]]) // second item alone
|
|
47
|
+
})
|
|
48
|
+
})
|