@dshahi468/nd-dataset-split 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4) hide show
  1. package/README.md +157 -0
  2. package/index.d.ts +42 -0
  3. package/index.js +190 -0
  4. package/package.json +39 -0
package/README.md ADDED
@@ -0,0 +1,157 @@
1
+ # nd-dataset-split
2
+
3
+ 大きなデータセットを安全に分割し、非同期処理をチャンク単位で実行するための軽量ユーティリティです。
4
+ 「1リクエストあたり最大20件」などの制約があるAPI連携(例: Bedrock 呼び出し)を共通化できます。
5
+
6
+ ## 特徴
7
+
8
+ - データを最大20件単位でチャンク化
9
+ - `Promise.all` ベースの並列チャンク処理
10
+ - 並列数(`maxConcurrency`)の制御
11
+ - リトライ + バックオフ(指数バックオフ対応)
12
+ - エラーごとの再試行可否を `shouldRetry` で制御
13
+ - 結果のマージ処理を `mergeResults` で自由に定義
14
+ - TypeScript 型定義同梱(`index.d.ts`)
15
+
16
+ ## インストール
17
+
18
+ ### npm レジストリから
19
+
20
+ ```bash
21
+ npm install @dshahi468/nd-dataset-split
22
+ ```
23
+
24
+ ## 使い方
25
+
26
+ ### 1. シンプルに使う(`processInChunks`)
27
+
28
+ ```ts
29
+ import { processInChunks } from "@dshahi468/nd-dataset-split";
30
+
31
+ const data = Array.from({ length: 45 }, (_, i) => i + 1);
32
+
33
+ const result = await processInChunks(
34
+ data,
35
+ async (chunk) => chunk.map((n) => n * 2),
36
+ { chunkSize: 20 }, // 1〜20
37
+ );
38
+
39
+ console.log(result.length); // 45
40
+ ```
41
+
42
+ ### 2. 実運用向け(`processDatasetInChunks`)
43
+
44
+ ```ts
45
+ import { processDatasetInChunks } from "@dshahi468/nd-dataset-split";
46
+
47
+ const output = await processDatasetInChunks(
48
+ events,
49
+ async (chunk, chunkIndex, totalChunks) => {
50
+ // or you can use your logic here
51
+ const res = await fetch("your-api-name", {
52
+ method: "POST",
53
+ headers: { "Content-Type": "application/json" },
54
+ body: JSON.stringify({
55
+ events: chunk,
56
+ }),
57
+ });
58
+
59
+ const payload = await res.json().catch(() => ({}));
60
+ if (!res.ok) {
61
+ throw new Error(
62
+ `Chunk ${chunkIndex + 1}/${totalChunks}: ${payload?.error || "Failed"}`,
63
+ );
64
+ }
65
+ return payload; // チャンク単位の結果
66
+ },
67
+ {
68
+ chunkSize: 20,
69
+ maxConcurrency: 3,
70
+ retries: 2,
71
+ retryDelayMs: 600,
72
+ backoffMultiplier: 1.5,
73
+ shouldRetry: (error) => {
74
+ const message = String((error as Error)?.message || "");
75
+ return message.includes("throttl") || message.includes("timeout");
76
+ },
77
+ mergeResults: (chunkResults) => chunkResults, // 必要に応じて独自マージ
78
+ },
79
+ );
80
+ ```
81
+
82
+ ## API
83
+
84
+ ### `chunkArray(data, chunkSize = 20)`
85
+
86
+ 配列をチャンクに分割します。
87
+
88
+ - `data`: 対象配列
89
+ - `chunkSize`: 1〜20 の整数
90
+ - 戻り値: `T[][]`
91
+
92
+ ### `wait(ms)`
93
+
94
+ 指定ミリ秒待機するヘルパーです。
95
+
96
+ - `ms`: 待機時間(ミリ秒)
97
+ - 戻り値: `Promise<void>`
98
+
99
+ ### `processInChunks(data, processChunk, options?)`
100
+
101
+ シンプルなチャンク処理用APIです。
102
+ 内部で全チャンクを `Promise.all` で一括実行します。
103
+
104
+ - `data`: 対象配列
105
+ - `processChunk`: `(chunk, chunkIndex, totalChunks) => Promise<R[] | R> | (R[] | R)`
106
+ - `options.chunkSize`:
107
+ - デフォルト `20`
108
+ - 1〜20 の整数
109
+ - `options.flatten`:
110
+ - デフォルト `true`
111
+ - `true`: 各チャンク結果を1つの配列にフラット化
112
+ - `false`: チャンクごとの結果配列をそのまま返却
113
+
114
+ ### `processDatasetInChunks(data, processChunk, options?)`
115
+
116
+ 実運用向けの拡張APIです。
117
+ 並列数制御、リトライ、バックオフ、カスタムマージに対応します。
118
+
119
+ - `data`: 対象配列
120
+ - `processChunk`: `(chunk, chunkIndex, totalChunks) => Promise<R> | R`
121
+ - `options.chunkSize`:
122
+ - デフォルト `20`
123
+ - 1〜20 の整数
124
+ - `options.maxConcurrency`:
125
+ - デフォルト `Infinity`
126
+ - 同時実行するチャンク数
127
+ - `options.retries`:
128
+ - デフォルト `0`
129
+ - 失敗時の再試行回数
130
+ - `options.retryDelayMs`:
131
+ - デフォルト `0`
132
+ - リトライ間隔の基準ミリ秒
133
+ - `options.backoffMultiplier`:
134
+ - デフォルト `1`
135
+ - 実遅延: `retryDelayMs * backoffMultiplier^attempt`
136
+ - `options.shouldRetry`:
137
+ - デフォルト `() => true`
138
+ - `false` を返すと即時失敗
139
+ - `options.mergeResults`:
140
+ - 指定時は `chunkResults` を任意の最終形式へ変換して返却
141
+ - 未指定時は `R[]`(チャンク結果配列)を返却
142
+
143
+ ## エラーハンドリングの考え方
144
+
145
+ - API制限(429 / throttling)や一時的なタイムアウトは `shouldRetry` で再試行対象にする
146
+ - 認可エラーや入力不正など恒久エラーは `shouldRetry` で `false` を返して即失敗
147
+ - エラーメッセージに `chunkIndex + 1` と `totalChunks` を入れると追跡しやすい
148
+
149
+ ## 注意点
150
+
151
+ - `chunkSize` は 20 を超えられません(仕様)
152
+ - `maxConcurrency` を上げすぎると、相手API側のレート制限にかかりやすくなります
153
+ - `mergeResults` で重複排除・集計・並び替えなどを実装すると、呼び出し側コードを薄くできます
154
+
155
+ ## ライセンス
156
+
157
+ ISC
package/index.d.ts ADDED
@@ -0,0 +1,42 @@
1
/**
 * Callback invoked once per chunk: receives the chunk, its zero-based
 * index, and the total number of chunks.
 */
export type ProcessChunk<T, R> = (
  chunk: T[],
  chunkIndex: number,
  totalChunks: number
) => Promise<R> | R;

/**
 * Predicate deciding whether a failed chunk attempt should be retried.
 * Returning `false` makes the chunk fail immediately with the original error.
 */
export type ShouldRetry = (
  error: unknown,
  attempt: number,
  chunkIndex: number,
  totalChunks: number
) => boolean;

/** Options for `processInChunks`. */
export type ProcessInChunksOptions = {
  /** Items per chunk; integer between 1 and 20. Default: 20. */
  chunkSize?: number;
  /** When true (default), chunk results are flattened one level into a single array. */
  flatten?: boolean;
};

/** Options for `processDatasetInChunks`. */
export type ProcessDatasetInChunksOptions<R, M> = {
  /** Items per chunk; integer between 1 and 20. Default: 20. */
  chunkSize?: number;
  /** Maximum number of chunks processed concurrently. Default: Infinity. */
  maxConcurrency?: number;
  /** Retry attempts per chunk after the first failure. Default: 0. */
  retries?: number;
  /** Base retry delay in milliseconds. Default: 0. */
  retryDelayMs?: number;
  /** Actual delay is `retryDelayMs * backoffMultiplier^attempt`. Default: 1. */
  backoffMultiplier?: number;
  /** Retry predicate. Default: always retry. */
  shouldRetry?: ShouldRetry;
  /** Optional transform of the collected per-chunk results into the final value. */
  mergeResults?: (chunkResults: R[]) => M;
};

/** Split `data` into consecutive chunks of at most `chunkSize` (1–20) items. */
export function chunkArray<T>(data: T[], chunkSize?: number): T[][];
/** Resolve after `ms` milliseconds. */
export function wait(ms: number): Promise<void>;

/** Process `data` chunk-by-chunk with `Promise.all`; see `ProcessInChunksOptions`. */
export function processInChunks<T, R>(
  data: T[],
  processChunk: ProcessChunk<T, R[] | R>,
  options?: ProcessInChunksOptions
): Promise<R[] | (R[] | R)[]>;

/** Process `data` in chunks with concurrency control, retries, and custom merge. */
export function processDatasetInChunks<T, R, M = R[]>(
  data: T[],
  processChunk: ProcessChunk<T, R>,
  options?: ProcessDatasetInChunksOptions<R, M>
): Promise<M | R[]>;
package/index.js ADDED
@@ -0,0 +1,190 @@
1
/**
 * Split an array into consecutive fixed-size chunks (the last chunk may be
 * smaller). The input array is never mutated.
 * @template T
 * @param {T[]} data - Source array.
 * @param {number} [chunkSize=20] - Items per chunk; integer in [1, 20].
 * @returns {T[][]} Consecutive slices of `data`.
 * @throws {TypeError} When `data` is not an array.
 * @throws {RangeError} When `chunkSize` is not an integer between 1 and 20.
 */
export function chunkArray(data, chunkSize = 20) {
  if (!Array.isArray(data)) {
    throw new TypeError("data must be an array");
  }

  const sizeIsValid =
    Number.isInteger(chunkSize) && chunkSize >= 1 && chunkSize <= 20;
  if (!sizeIsValid) {
    throw new RangeError("chunkSize must be an integer between 1 and 20");
  }

  // One slice per chunk; an empty input yields an empty chunk list.
  const chunkCount = Math.ceil(data.length / chunkSize);
  return Array.from({ length: chunkCount }, (_, index) =>
    data.slice(index * chunkSize, (index + 1) * chunkSize)
  );
}
23
+
24
/**
 * Resolve after the given number of milliseconds (used for retry backoff).
 * @param {number} ms - Delay in milliseconds.
 * @returns {Promise<void>}
 */
export function wait(ms) {
  return new Promise((done) => {
    setTimeout(() => done(), ms);
  });
}
32
+
33
/**
 * Process a dataset chunk-by-chunk (max 20 items per chunk), running every
 * chunk in parallel via Promise.all.
 * @template T
 * @template R
 * @param {T[]} data - Source array.
 * @param {(chunk: T[], chunkIndex: number, totalChunks: number) => Promise<R[] | R> | (R[] | R)} processChunk
 *   Invoked once per chunk with the chunk, its index, and the total chunk count.
 * @param {{ chunkSize?: number, flatten?: boolean }} [options]
 *   `chunkSize` (default 20, 1–20); `flatten` (default true) merges array
 *   results one level deep into a single array.
 * @returns {Promise<R[] | (R[] | R)[]>} Flattened results, or the raw
 *   per-chunk results when `flatten` is false.
 * @throws {TypeError} When `processChunk` is not a function.
 */
export async function processInChunks(data, processChunk, options = {}) {
  if (typeof processChunk !== "function") {
    throw new TypeError("processChunk must be a function");
  }

  const { chunkSize = 20, flatten = true } = options;
  const chunks = chunkArray(data, chunkSize);
  const totalChunks = chunks.length;

  const perChunk = await Promise.all(
    chunks.map((chunk, index) => processChunk(chunk, index, totalChunks))
  );

  if (!flatten) {
    return perChunk;
  }

  // flat() flattens array results exactly one level and keeps non-array
  // results as single items — same semantics as a manual push/spread loop.
  return perChunk.flat();
}
66
+
67
/**
 * Execute one chunk operation, retrying failures according to `config`.
 * A chunk is attempted at most `config.retries + 1` times; `shouldRetry` is
 * consulted only when another attempt remains.
 * @template T
 * @template R
 * @param {T[]} chunk - The chunk to process.
 * @param {number} chunkIndex - Zero-based index of this chunk.
 * @param {number} totalChunks - Total number of chunks.
 * @param {(chunk: T[], chunkIndex: number, totalChunks: number) => Promise<R>} processChunk
 * @param {{
 *   retries: number,
 *   getRetryDelay: (attempt: number, error: unknown) => number,
 *   shouldRetry: (error: unknown, attempt: number, chunkIndex: number, totalChunks: number) => boolean
 * }} config
 * @returns {Promise<R>} The chunk result of the first successful attempt.
 */
async function runChunkWithRetry(chunk, chunkIndex, totalChunks, processChunk, config) {
  const { retries, getRetryDelay, shouldRetry } = config;

  for (let attempt = 0; attempt <= retries; attempt += 1) {
    try {
      return await processChunk(chunk, chunkIndex, totalChunks);
    } catch (error) {
      // Out of attempts, or the caller deems this error permanent: rethrow.
      const isLastAttempt = attempt >= retries;
      if (isLastAttempt || !shouldRetry(error, attempt, chunkIndex, totalChunks)) {
        throw error;
      }

      const delayMs = getRetryDelay(attempt, error);
      if (delayMs > 0) {
        await wait(delayMs);
      }
    }
  }

  // Unreachable — the loop either returns or rethrows — kept as a safety net.
  throw new Error(`Chunk ${chunkIndex + 1}/${totalChunks} failed after retries`);
}
107
+
108
/**
 * Process any dataset in chunks with bounded concurrency, retry with
 * exponential backoff, and an optional custom merge of per-chunk results.
 * @template T
 * @template R
 * @template M
 * @param {T[]} data - Source array.
 * @param {(chunk: T[], chunkIndex: number, totalChunks: number) => Promise<R> | R} processChunk
 *   Invoked once per chunk; its resolved value becomes that chunk's result.
 * @param {{
 *   chunkSize?: number,
 *   maxConcurrency?: number,
 *   retries?: number,
 *   retryDelayMs?: number,
 *   backoffMultiplier?: number,
 *   shouldRetry?: (error: unknown, attempt: number, chunkIndex: number, totalChunks: number) => boolean,
 *   mergeResults?: (chunkResults: R[]) => M
 * }} [options]
 *   Defaults: chunkSize 20, maxConcurrency Infinity, retries 0,
 *   retryDelayMs 0, backoffMultiplier 1, shouldRetry always true.
 * @returns {Promise<M | R[]>} `mergeResults(chunkResults)` when provided,
 *   otherwise the array of per-chunk results in chunk order.
 * @throws {TypeError} For non-function `processChunk`/`shouldRetry`/`mergeResults`,
 *   or a non-integer `maxConcurrency` other than Infinity.
 * @throws {RangeError} For a non-positive integer `maxConcurrency` or a
 *   negative/non-integer `retries`.
 */
export async function processDatasetInChunks(data, processChunk, options = {}) {
  if (typeof processChunk !== "function") {
    throw new TypeError("processChunk must be a function");
  }

  const {
    chunkSize = 20,
    maxConcurrency = Number.POSITIVE_INFINITY,
    retries = 0,
    retryDelayMs = 0,
    backoffMultiplier = 1,
    shouldRetry = () => true,
    mergeResults,
  } = options;

  // Single consolidated maxConcurrency check (previously two overlapping
  // checks): non-integer values other than Infinity -> TypeError,
  // non-positive integers -> RangeError. Error types are unchanged.
  if (maxConcurrency !== Number.POSITIVE_INFINITY) {
    if (!Number.isInteger(maxConcurrency)) {
      throw new TypeError("maxConcurrency must be a positive integer or Infinity");
    }
    if (maxConcurrency <= 0) {
      throw new RangeError("maxConcurrency must be greater than 0");
    }
  }

  if (!Number.isInteger(retries) || retries < 0) {
    throw new RangeError("retries must be a non-negative integer");
  }

  if (typeof shouldRetry !== "function") {
    throw new TypeError("shouldRetry must be a function");
  }

  if (mergeResults && typeof mergeResults !== "function") {
    throw new TypeError("mergeResults must be a function");
  }

  const chunks = chunkArray(data, chunkSize);
  /** @type {R[]} */
  const chunkResults = [];

  // Exponential backoff: retryDelayMs * backoffMultiplier^attempt.
  const getRetryDelay = (attempt) => retryDelayMs * Math.pow(backoffMultiplier, attempt);

  // Run at most `maxConcurrency` chunks at a time. With Infinity the first
  // batch covers every chunk and the loop exits after one iteration
  // (i += Infinity), so unbounded concurrency still works.
  for (let i = 0; i < chunks.length; i += maxConcurrency) {
    const batch = chunks.slice(i, i + maxConcurrency);
    const batchResults = await Promise.all(
      batch.map((chunk, batchOffset) =>
        runChunkWithRetry(chunk, i + batchOffset, chunks.length, processChunk, {
          retries,
          getRetryDelay,
          shouldRetry,
        })
      )
    );
    chunkResults.push(...batchResults);
  }

  return mergeResults ? mergeResults(chunkResults) : chunkResults;
}
package/package.json ADDED
@@ -0,0 +1,39 @@
1
+ {
2
+ "name": "@dshahi468/nd-dataset-split",
3
+ "version": "1.0.0",
4
+ "description": "Split large dataset and resolve timeout issues",
5
+ "keywords": [
6
+ "chunk",
7
+ "batch",
8
+ "dataset",
9
+ "promise-all",
10
+ "retry",
11
+ "concurrency",
12
+ "bedrock"
13
+ ],
14
+ "license": "ISC",
15
+ "author": "Dilendra Vikram Shahi",
16
+ "type": "module",
17
+ "main": "index.js",
18
+ "types": "index.d.ts",
19
+ "exports": {
20
+ ".": {
21
+ "types": "./index.d.ts",
22
+ "import": "./index.js"
23
+ }
24
+ },
25
+ "files": [
26
+ "index.js",
27
+ "index.d.ts",
28
+ "README.md"
29
+ ],
30
+ "publishConfig": {
31
+ "access": "public"
32
+ },
33
+ "engines": {
34
+ "node": ">=18"
35
+ },
36
+ "scripts": {
37
+ "test": "echo \"Error: no test specified\" && exit 1"
38
+ }
39
+ }