@push.rocks/taskbuffer 3.1.9 → 3.1.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -3,7 +3,7 @@
3
3
  */
4
4
  export const commitinfo = {
5
5
  name: '@push.rocks/taskbuffer',
6
- version: '3.1.9',
6
+ version: '3.1.10',
7
7
  description: 'A flexible task management library supporting TypeScript, allowing for task buffering, scheduling, and execution with dependency management.'
8
8
  };
9
- //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiMDBfY29tbWl0aW5mb19kYXRhLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vdHMvMDBfY29tbWl0aW5mb19kYXRhLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBOztHQUVHO0FBQ0gsTUFBTSxDQUFDLE1BQU0sVUFBVSxHQUFHO0lBQ3hCLElBQUksRUFBRSx3QkFBd0I7SUFDOUIsT0FBTyxFQUFFLE9BQU87SUFDaEIsV0FBVyxFQUFFLDhJQUE4STtDQUM1SixDQUFBIn0=
9
+ //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiMDBfY29tbWl0aW5mb19kYXRhLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vdHMvMDBfY29tbWl0aW5mb19kYXRhLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBOztHQUVHO0FBQ0gsTUFBTSxDQUFDLE1BQU0sVUFBVSxHQUFHO0lBQ3hCLElBQUksRUFBRSx3QkFBd0I7SUFDOUIsT0FBTyxFQUFFLFFBQVE7SUFDakIsV0FBVyxFQUFFLDhJQUE4STtDQUM1SixDQUFBIn0=
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@push.rocks/taskbuffer",
3
- "version": "3.1.9",
3
+ "version": "3.1.10",
4
4
  "private": false,
5
5
  "description": "A flexible task management library supporting TypeScript, allowing for task buffering, scheduling, and execution with dependency management.",
6
6
  "main": "dist_ts/index.js",
package/readme.md CHANGED
@@ -72,6 +72,322 @@ const myTask = new Task({
72
72
  const result = await myTask.trigger();
73
73
  ```
74
74
 
75
+ ## TypeScript Generics Support 🔬
76
+
77
+ TaskBuffer leverages TypeScript's powerful generics system for complete type safety across your task chains and workflows.
78
+
79
+ ### Generic Task Functions
80
+
81
+ Tasks support generic type parameters for both input and output types:
82
+
83
+ ```typescript
84
+ import { Task, ITaskFunction } from '@push.rocks/taskbuffer';
85
+
86
+ // Define typed interfaces
87
+ interface UserData {
88
+ id: string;
89
+ name: string;
90
+ email: string;
91
+ }
92
+
93
+ interface ProcessedUser {
94
+ userId: string;
95
+ displayName: string;
96
+ normalized: boolean;
97
+ }
98
+
99
+ // Create strongly typed tasks
100
+ const processUserTask = new Task<UserData, ProcessedUser>({
101
+ name: 'ProcessUser',
102
+ taskFunction: async (user: UserData): Promise<ProcessedUser> => {
103
+ return {
104
+ userId: user.id,
105
+ displayName: user.name.toUpperCase(),
106
+ normalized: true
107
+ };
108
+ }
109
+ });
110
+
111
+ // Type safety enforced at compile time
112
+ const result: ProcessedUser = await processUserTask.trigger({
113
+ id: '123',
114
+ name: 'John Doe',
115
+ email: 'john@example.com'
116
+ });
117
+ ```
118
+
119
+ ### Generic Setup Values
120
+
121
+ Tasks can accept setup values through generics, perfect for configuration:
122
+
123
+ ```typescript
124
+ interface TaskConfig {
125
+ apiEndpoint: string;
126
+ retryCount: number;
127
+ timeout: number;
128
+ }
129
+
130
+ const configuredTask = new Task<TaskConfig>({
131
+ name: 'ConfiguredTask',
132
+ taskSetup: async () => ({
133
+ apiEndpoint: 'https://api.example.com',
134
+ retryCount: 3,
135
+ timeout: 5000
136
+ }),
137
+ taskFunction: async (data: any, setupValue: TaskConfig) => {
138
+ // setupValue is fully typed!
139
+ for (let i = 0; i < setupValue.retryCount; i++) {
140
+ try {
141
+ return await fetchWithTimeout(
142
+ setupValue.apiEndpoint,
143
+ setupValue.timeout
144
+ );
145
+ } catch (error) {
146
+ if (i === setupValue.retryCount - 1) throw error;
147
+ }
148
+ }
149
+ }
150
+ });
151
+ ```
152
+
153
+ ### Type-Safe Task Chains
154
+
155
+ Chain tasks with preserved type flow:
156
+
157
+ ```typescript
158
+ // Each task knows its input and output types
159
+ const fetchTask = new Task<void, UserData[]>({
160
+ name: 'FetchUsers',
161
+ taskFunction: async (): Promise<UserData[]> => {
162
+ return await api.getUsers();
163
+ }
164
+ });
165
+
166
+ const filterTask = new Task<UserData[], UserData[]>({
167
+ name: 'FilterActive',
168
+ taskFunction: async (users: UserData[]): Promise<UserData[]> => {
169
+ return users.filter(user => user.isActive);
170
+ }
171
+ });
172
+
173
+ const mapTask = new Task<UserData[], ProcessedUser[]>({
174
+ name: 'MapToProcessed',
175
+ taskFunction: async (users: UserData[]): Promise<ProcessedUser[]> => {
176
+ return users.map(transformUser);
177
+ }
178
+ });
179
+
180
+ // Type safety flows through the chain
181
+ const chain = new Taskchain({
182
+ name: 'UserPipeline',
183
+ taskArray: [fetchTask, filterTask, mapTask]
184
+ });
185
+
186
+ const finalResult: ProcessedUser[] = await chain.trigger();
187
+ ```
188
+
189
+ ## Buffer Behavior Deep Dive 🌊
190
+
191
+ The buffer system in TaskBuffer provides intelligent control over concurrent executions, preventing system overload while maximizing throughput.
192
+
193
+ ### How Buffering Works
194
+
195
+ When a task is buffered, TaskBuffer manages a queue of executions:
196
+
197
+ ```typescript
198
+ const bufferedTask = new Task({
199
+ name: 'BufferedOperation',
200
+ taskFunction: async (data) => {
201
+ console.log(`Processing: ${data}`);
202
+ await simulateWork();
203
+ return `Processed: ${data}`;
204
+ },
205
+ buffered: true,
206
+ bufferMax: 3 // Maximum 3 concurrent executions
207
+ });
208
+
209
+ // Trigger 10 executions rapidly
210
+ for (let i = 0; i < 10; i++) {
211
+ bufferedTask.trigger(`Item ${i}`);
212
+ }
213
+
214
+ // What happens:
215
+ // 1. First 3 tasks (Items 0-2) start immediately
216
+ // 2. Items 3-9 are queued
217
+ // 3. As each task completes, next queued item starts
218
+ // 4. Never more than 3 tasks running simultaneously
219
+ ```
220
+
221
+ ### Buffer Truncation Behavior
222
+
223
+ When buffer limit is reached, new calls are intelligently managed:
224
+
225
+ ```typescript
226
+ const truncatingTask = new Task({
227
+ name: 'TruncatingBuffer',
228
+ taskFunction: async (data) => {
229
+ await processData(data);
230
+ },
231
+ buffered: true,
232
+ bufferMax: 5 // Maximum 5 in buffer
233
+ });
234
+
235
+ // Rapid fire 100 calls
236
+ for (let i = 0; i < 100; i++) {
237
+ truncatingTask.trigger(`Data ${i}`);
238
+ }
239
+
240
+ // Buffer behavior:
241
+ // - First 5 calls: Added to buffer and start processing
242
+ // - Calls 6-100: Each overwrites the 5th buffer slot
243
+ // - Result: Only processes items 0,1,2,3, and 99 (last one)
244
+ // - This prevents memory overflow in high-frequency scenarios
245
+ ```
246
+
247
+ ### Advanced Buffer Strategies
248
+
249
+ #### 1. **Sliding Window Buffer**
250
+ Perfect for real-time data processing where only recent items matter:
251
+
252
+ ```typescript
253
+ const slidingWindowTask = new Task({
254
+ name: 'SlidingWindow',
255
+ taskFunction: async (data) => {
256
+ return await analyzeRecentData(data);
257
+ },
258
+ buffered: true,
259
+ bufferMax: 10, // Keep last 10 items
260
+ execDelay: 100 // Process every 100ms
261
+ });
262
+
263
+ // In a real-time stream scenario
264
+ dataStream.on('data', (chunk) => {
265
+ slidingWindowTask.trigger(chunk);
266
+ // Older items automatically dropped when buffer full
267
+ });
268
+ ```
269
+
270
+ #### 2. **Throttled Buffer**
271
+ Combine buffering with execution delays for rate limiting:
272
+
273
+ ```typescript
274
+ const apiRateLimiter = new Task({
275
+ name: 'RateLimitedAPI',
276
+ taskFunction: async (request) => {
277
+ return await api.call(request);
278
+ },
279
+ buffered: true,
280
+ bufferMax: 10, // Max 10 queued requests
281
+ execDelay: 1000 // 1 second between executions
282
+ });
283
+
284
+ // Requests are queued and executed at 1/second
285
+ // Prevents API rate limit violations
286
+ ```
287
+
288
+ #### 3. **Priority Buffer** (Custom Implementation)
289
+ Implement priority queuing with buffer management:
290
+
291
+ ```typescript
292
+ class PriorityBufferedTask extends Task {
293
+ private priorityQueue: Array<{data: any, priority: number}> = [];
294
+
295
+ constructor(options) {
296
+ super({
297
+ ...options,
298
+ taskFunction: async (item) => {
299
+ // Process based on priority
300
+ return await this.processByPriority(item);
301
+ }
302
+ });
303
+ }
304
+
305
+ triggerWithPriority(data: any, priority: number) {
306
+ if (this.priorityQueue.length >= this.bufferMax) {
307
+ // Remove lowest priority item if buffer full
308
+ this.priorityQueue.sort((a, b) => b.priority - a.priority);
309
+ this.priorityQueue.pop();
310
+ }
311
+ this.priorityQueue.push({data, priority});
312
+ this.priorityQueue.sort((a, b) => b.priority - a.priority);
313
+ return this.trigger(this.priorityQueue.shift());
314
+ }
315
+ }
316
+ ```
317
+
318
+ ### Buffer Monitoring
319
+
320
+ Track buffer utilization and performance:
321
+
322
+ ```typescript
323
+ const monitoredTask = new Task({
324
+ name: 'MonitoredBuffer',
325
+ taskFunction: async (data) => {
326
+ const startTime = Date.now();
327
+ const result = await processData(data);
328
+ console.log(`Processing time: ${Date.now() - startTime}ms`);
329
+ console.log(`Buffer utilization: ${monitoredTask.bufferRunner.bufferCounter}/${monitoredTask.bufferMax}`);
330
+ return result;
331
+ },
332
+ buffered: true,
333
+ bufferMax: 20
334
+ });
335
+
336
+ // Monitor buffer saturation
337
+ setInterval(() => {
338
+ const utilization = (monitoredTask.bufferRunner.bufferCounter / monitoredTask.bufferMax) * 100;
339
+ if (utilization > 80) {
340
+ console.warn(`Buffer near capacity: ${utilization.toFixed(1)}%`);
341
+ }
342
+ }, 1000);
343
+ ```
344
+
345
+ ### Buffer Best Practices
346
+
347
+ 1. **Choose appropriate buffer sizes**:
348
+ - I/O operations: 5-10 concurrent
349
+ - CPU-intensive: Number of cores
350
+ - API calls: Based on rate limits
351
+
352
+ 2. **Handle buffer overflow gracefully**:
353
+ ```typescript
354
+ const task = new Task({
355
+ taskFunction: async (data) => {
356
+ try {
357
+ return await process(data);
358
+ } catch (error) {
359
+ if (error.code === 'BUFFER_OVERFLOW') {
360
+ // Implement backoff strategy
361
+ await delay(1000);
362
+ return task.trigger(data);
363
+ }
364
+ throw error;
365
+ }
366
+ },
367
+ buffered: true,
368
+ bufferMax: 10
369
+ });
370
+ ```
371
+
372
+ 3. **Monitor and adjust dynamically**:
373
+ ```typescript
374
+ // Adjust buffer size based on system load
375
+ const adaptiveTask = new Task({
376
+ name: 'AdaptiveBuffer',
377
+ taskFunction: async (data) => {
378
+ const cpuLoad = await getSystemLoad();
379
+ if (cpuLoad > 0.8) {
380
+ adaptiveTask.bufferMax = Math.max(2, adaptiveTask.bufferMax - 1);
381
+ } else if (cpuLoad < 0.5) {
382
+ adaptiveTask.bufferMax = Math.min(20, adaptiveTask.bufferMax + 1);
383
+ }
384
+ return await process(data);
385
+ },
386
+ buffered: true,
387
+ bufferMax: 10
388
+ });
389
+ ```
390
+
75
391
  ### Buffered Execution (Rate Limiting)
76
392
 
77
393
  Perfect for API calls or database operations that need throttling:
@@ -3,6 +3,6 @@
3
3
  */
4
4
  export const commitinfo = {
5
5
  name: '@push.rocks/taskbuffer',
6
- version: '3.1.9',
6
+ version: '3.1.10',
7
7
  description: 'A flexible task management library supporting TypeScript, allowing for task buffering, scheduling, and execution with dependency management.'
8
8
  }