ciorent 0.0.26 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,72 +1,4 @@
  A lightweight, low-overhead concurrency library.
- ## Semaphore
- Semaphore is a concurrency primitive used to control access to a common resource by multiple processes.
-
- ```ts
- import * as semaphore from 'ciorent/semaphore';
- import * as cio from 'ciorent';
-
- const task = semaphore.wrap(
-   async (task: number) => {
-     for (let i = 1; i <= 5; i++) {
-       console.log('Task', task, 'iteration', i);
-       await cio.pause;
-     }
-
-     console.log('Task', task, 'end');
-   }
- );
-
- // Only allow 2 task to run concurrently
- const sem = semaphore.init(2);
-
- // Try to run 6 tasks concurrently
- cio.concurrent(6, (sem, id) => task(sem, id), sem);
- ```
-
- ## Latch
- Latch is a synchronization primitive that allows one process to wait until another completes an operation before continuing execution.
-
- ```ts
- import * as latch from 'ciorent/latch';
- import * as cio from 'ciorent';
-
- const fetchLatch = latch.init();
-
- const task = async () => {
-   // Blocks until the latch is open
-   await latch.pause(fetchLatch);
-
-   const res = await fetch('http://example.com');
-   console.log('Fetch status:', res.status);
- }
-
- const prepare = () => {
-   console.log('Run before fetch:', performance.now().toFixed(2));
- }
-
- const main = async () => {
-   const p = task();
-   await cio.sleep(500);
-   prepare();
-
-   // Allows all previously blocked tasks to run
-   latch.open(fetchLatch);
-
-   // Reclose the latch
-   // Tasks that aren't blocked yet will be blocked
-   latch.reset(fetchLatch);
-
-   return p;
- }
-
- // Run fetch after 500ms
- await main();
-
- // Run fetch after another 500ms
- await main();
- ```
-
  ## Fibers
  Virtual threads with more controlled execution.

@@ -101,6 +33,33 @@ const thread2 = fiber.fn(function* (thread) {
  fiber.spawn(thread2);
  ```

+ ## Latch
+ Latch is a synchronization primitive that allows one process to wait until another completes an operation before continuing execution.
+
+ ```ts
+ import * as latch from 'ciorent/latch';
+
+ const startFetch = latch.init();
+
+ const task = async () => {
+   // Blocks until the latch is open
+   await latch.pause(startFetch);
+
+   console.log('Start fetching...');
+   const res = await fetch('http://example.com');
+   console.log('Fetch status:', res.status);
+ }
+
+ const prepare = () => {
+   // This always run first
+   console.log('Run before fetch:', performance.now().toFixed(2));
+   latch.open(startFetch);
+ }
+
+ task();
+ prepare();
+ ```
+
  ## Pubsub
  A fast, simple publish-subscribe API.

@@ -112,7 +71,7 @@ const messages = topic.init<number>();

  // A task that publish messages
  const publisher = async () => {
-   for (let i = 0; i < 5; i++) {
+   for (let i = 0; i < 3; i++) {
      await cio.sleep(100);
      topic.pub(messages, i);
    }
@@ -122,15 +81,15 @@ const publisher = async () => {
    topic.flush(messages);
  }

- // Spawn 5 tasks that recieve messages
- cio.concurrent(5, async (id: number) => {
+ // Spawn 3 tasks that recieve messages
+ cio.concurrent(3, async (id: number) => {
    const sub = topic.sub(messages);

    while (true) {
      // Block until the value is sent
      const x = await topic.recieve(sub);
      if (x == null) break;
-     console.log(`Task ${id}: ${x}`);
+     console.log(`Task ${id} recieved: ${x}`);
    }
  });

@@ -147,8 +106,8 @@ import * as cio from 'ciorent';
  const c = channel.init<number>();

  const run = async () => {
-   for (let i = 0; i < 10; i++) {
-     await cio.sleep(10);
+   for (let i = 0; i < 5; i++) {
+     await cio.sleep(100);
      channel.send(c, i);
      console.log('Sent', i);
    }
@@ -160,7 +119,7 @@ const run = async () => {

  const log = async () => {
    while (true) {
-     // Wait until a value is sent
+     // Wait until a value is sent to the channel
      const x = await channel.recieve(c);
      if (x == null) break;

@@ -168,13 +127,42 @@ const log = async () => {
    };
  }

- run();
  log();
+ run();

  // This runs first
  console.log('Starting...');
  ```

+ ## Semaphore
+ Semaphore is a concurrency primitive used to control access to a common resource by multiple processes.
+
+ ```ts
+ import * as semaphore from 'ciorent/semaphore';
+ import * as cio from 'ciorent';
+
+ // Only allow 2 task to run concurrently
+ const sem = semaphore.init(2);
+
+ const task = async (id: number) => {
+   // Acquire the semaphore or wait for the semaphore to be available
+   await semaphore.pause(sem);
+
+   console.log('Task', id, 'started');
+
+   // Let the main thread schedules other tasks
+   for (let i = 1; i <= 5; i++) await cio.pause;
+
+   console.log('Task', id, 'end');
+
+   // Release the semaphore
+   semaphore.signal(sem);
+ }
+
+ // Try to run 5 tasks concurrently
+ cio.concurrent(5, task);
+ ```
+
  ## Utilities
  ### Pausing
  Delay the execution of a function for other asynchronous tasks to run.
@@ -185,9 +173,11 @@ import * as cio from 'ciorent';
  const task1 = async () => {
    let x = 0;

-   // Pause to let task2 to run
+   // Yield control back to the runtime, allowing it to
+   // schedule other tasks
    await cio.pause;

+   // Simulate heavy operation
    for (let i = 0; i < (Math.random() + 15) * 1e6; i++)
      x += Math.random() * 32 + i * Math.round(Math.random() * 16);

@@ -196,11 +186,8 @@ const task1 = async () => {

  // Short async task
  const task2 = async () => {
-   console.log('Fetch start', performance.now().toFixed(2) + 'ms');
-
-   // This will pause task2 to let task1 to continue running
+   console.log('Start fetching...');
    const txt = await fetch('http://example.com');
-
    console.log('Fetch status', txt.status);
  };

@@ -212,15 +199,20 @@ task2();
  ### Sleep
  Cross-runtime synchronous and asynchronous sleep functions.
  ```ts
- import { sleep, sleepSync } from 'ciorent';
+ import * as cio from 'ciorent';

- await sleep(500);
- console.log('Hi');
+ const logTime = (label: string) => console.log(label + ':', Math.floor(performance.now()) + 'ms');

- // This blocks the current thread
- // On the browser this only works in workers
- sleepSync(500);
- console.log('Hi');
+ logTime('Start');
+
+ // Non-blocking
+ await cio.sleep(500);
+ logTime('After about 0.5s');
+
+ // This blocks the event loop
+ // On the browser this only works in workers and blocks the worker thread
+ cio.sleepSync(500);
+ logTime('After another 0.5s');
  ```

  ### Spawning tasks
@@ -229,17 +221,17 @@ Utilities to create and run tasks.
  import * as cio from 'ciorent';

  const task = async (id: number) => {
-   await cio.sleep(Math.random() * 20 + 50);
+   await cio.sleep((10 - id) * 20 + 50);
    console.log('Task', id, 'done');
  }

  // Spawn and run 5 tasks sequentially
  console.log('Running 5 tasks sequentially:');
- cio.sequential(5, task);
+ await cio.sequential(5, task);

  // Spawn and run 5 tasks concurrently
  console.log('Running 5 tasks concurrently:');
- cio.concurrent(5, task);
+ await cio.concurrent(5, task);
  ```

  ### Debounce
@@ -263,7 +255,7 @@ import * as cio from 'ciorent';

  // Allow 1 call in 500ms
  const fn = cio.rateLimit((id: number) => {
-   console.log('ID:', id);
+   console.log('Call ' + id + ':', Math.floor(performance.now()) + 'ms');
  }, 500, 1);

  fn(1); // fn(1) gets executed
@@ -283,7 +275,6 @@ const fn = cio.throttle((id: number) => {
    console.log(id + ': ' + Math.floor(performance.now()) + 'ms');
  }, 500, 2);

- for (let i = 0; i < 8; i++)
-   fn(i);
+ cio.concurrent(8, (id) => fn(id));
  ```

package/channel.js CHANGED
@@ -1 +1 @@
- export let init=()=>{let qu=[null,null];let resolveQu=[null,null];return[qu,qu,resolveQu,resolveQu]};export let send=(c,t)=>{if(c[3][1]!==null)(c[3]=c[3][1])[0](t);else c[0]=c[0][1]=[t,null]};export let recieve=(c)=>c[1][1]!==null?Promise.resolve((c[1]=c[1][1])[0]):new Promise((res)=>{c[2]=c[2][1]=[res,null]});export let poll=(c)=>c[1][1]!==null?(c[1]=c[1][1])[0]:undefined;export let flush=(c)=>{while(c[3][1]!==null)(c[3]=c[3][1])[0]()};
+ export let init=()=>{let qu=[null];let resolveQu=[null,null];return[qu,qu,resolveQu,resolveQu]};export let send=(c,t)=>{if(c[3][0]!==null)(c[3]=c[3][0])[1](t);else c[0]=c[0][0]=[null,t]};export let recieve=(c)=>c[1][0]!==null?Promise.resolve((c[1]=c[1][0])[1]):new Promise((res)=>{c[2]=c[2][0]=[null,res]});export let poll=(c)=>c[1][0]!==null?(c[0]=c[1][0])[1]:undefined;export let flush=(c)=>{while(c[3][0]!==null)(c[3]=c[3][0])[1]()};
package/fiber.js CHANGED
@@ -1 +1 @@
- export let paused=(t)=>t[1]===0;export let running=(t)=>t[1]===1;export let done=(t)=>t[1]===2;let invoke=async(g,thread)=>{let t=g.next();while(!t.done){let v=await t.value;if(thread[1]===0){let r;let p=new Promise((res)=>{r=res});thread[2]=r;await p}if(thread[1]===2){thread[3].forEach(stop);return v}t=g.next(v)}thread[1]=2;thread[3].forEach(stop);return t.value};export let fn=(f)=>f;export let spawn=(f,...args)=>{let thread=[null,1,null,[]];thread[0]=invoke(f(thread,...args),thread);return thread};export let pause=(t)=>{if(t[1]===1)t[1]=0};export let resume=(t)=>{if(t[1]===0){t[1]=1;t[2]?.()}};export let stop=(t)=>{if(t[1]===0){t[1]=2;t[2]?.()}else t[1]=2};export function*join(t){return yield t[1]}export let finish=(t)=>t[1];export let mount=(child,parent)=>{parent[3].push(child)};export let control=(t,signal)=>{signal.addEventListener("abort",()=>{stop(t)})};export function*unwrap(t){return yield t}
+ export let paused=(t)=>t[1]===0;export let running=(t)=>t[1]===1;export let done=(t)=>t[1]===2;let invoke=async(g,thread)=>{try{let t=g.next();while(!t.done){let v=await t.value;if(thread[1]===0){let r;let p=new Promise((res)=>{r=res});thread[2]=r;await p}if(thread[1]===2){thread[3].forEach(stop);return v}t=g.next(v)}thread[1]=2;return t.value}finally{thread[3].forEach(stop)}};export let fn=(f)=>f;export let spawn=(f,...args)=>{let thread=[null,1,null,[]];thread[0]=invoke(f(thread,...args),thread);return thread};export let pause=(t)=>{if(t[1]===1)t[1]=0};export let resume=(t)=>{if(t[1]===0){t[1]=1;t[2]?.()}};export let stop=(t)=>{if(t[1]===0){t[1]=2;t[2]?.()}else t[1]=2};export function*join(t){return yield t[1]}export let finish=(t)=>t[1];export let mount=(child,parent)=>{parent[3].push(child)};export let control=(t,signal)=>{signal.addEventListener("abort",()=>{stop(t)})};export function*unwrap(t){return yield t}
package/fixed-queue.d.ts CHANGED
@@ -26,7 +26,7 @@ export interface FixedQueue<T extends {}> {
  /**
   * Describe a queue node (singly linked list node)
   */
- export type QueueNode<T> = [value: T, next: QueueNode<T> | null];
+ export type QueueNode<T> = [next: QueueNode<T> | null, value: T];
  /**
   * Create a fixed queue.
   * @param n - The queue size
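The `QueueNode` tuple now carries the `next` pointer at index 0 and the value at index 1. As a rough illustration only (not part of the package diff, and assuming the type remains importable from `ciorent/fixed-queue` as the declarations above and the package exports suggest), a chain built with the new layout looks like this:

```ts
import type { QueueNode } from 'ciorent/fixed-queue';

// Build a two-node singly linked list with the new [next, value] layout
const second: QueueNode<number> = [null, 2];
const first: QueueNode<number> = [second, 1];

// Walk the chain: index 0 is the next pointer, index 1 is the value
for (let node: QueueNode<number> | null = first; node !== null; node = node[0])
  console.log(node[1]); // 1, then 2
```
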
package/index.d.ts CHANGED
@@ -2,7 +2,7 @@
   * @module Other utilities
   */
  /**
-  * Continue running the function on next microtask.
+  * Yield back to main thread.
   *
   * You can `await` this **occasionally** in an expensive synchronous operation to avoid
   *
@@ -10,13 +10,15 @@
   */
  export declare const pause: Promise<void>;
  /**
-  * Sleep for a duration
+  * Sleep for a duration.
   * @param ms - Sleep duration in milliseconds
   */
  export declare const sleep: (ms: number) => Promise<void>;
  /**
   * Sleep for a duration synchronously.
   *
+  * This method blocks the current thread.
+  *
   * On the browser it only works in workers.
   * @param ms - Sleep duration in milliseconds
   */
@@ -35,20 +37,20 @@ export declare const sequential: <const T extends any[]>(n: number, task: (...ar
  export declare const concurrent: <const T extends any[], const R>(n: number, task: (...args: [...T, id: number]) => Promise<R>, ...args: T) => Promise<R[]>;
  /**
   * Drop function calls until it doesn't get called for a specific period.
-  * @param f - The target function to debounce
+  * @param f - The target function to debounce (it must not throw errors)
   * @param ms - The time period in milliseconds
   */
  export declare const debounce: <const Args extends any[]>(f: (...args: Args) => any, ms: number) => ((...args: Args) => void);
  /**
   * Drop function calls for a specific period
-  * @param f - The target function to rate limit
+  * @param f - The target function to rate limit (it must not throw errors)
   * @param ms - The time period in milliseconds
   * @param limit - The call limit in the time period
   */
  export declare const rateLimit: <const Args extends any[]>(f: (...args: Args) => any, ms: number, limit: number) => ((...args: Args) => void);
  /**
   * Throttle function execution for a time period
-  * @param f - The function to throttle
+  * @param f - The function to throttle (it must not throw errors)
   * @param ms - The time in milliseconds
   * @param limit - The call limit in the time period
   */
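The updated declarations note that callbacks passed to `debounce`, `rateLimit`, and `throttle` must not throw. A minimal usage sketch (my own example, not from the package) that keeps error handling inside the callback so nothing throws back into the limiter:

```ts
import * as cio from 'ciorent';

// Allow at most 1 call per 500ms; the callback handles its own errors
const track = cio.rateLimit((payload: string) => {
  try {
    const data = JSON.parse(payload);
    console.log('tracked:', data);
  } catch (err) {
    console.error('invalid payload:', err);
  }
}, 500, 1);

track('{"event":"click"}');  // executed
track('{"event":"scroll"}'); // dropped inside the same 500ms window
```
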
package/index.js CHANGED
@@ -1 +1 @@
- export let pause=Promise.resolve();export let sleep=globalThis.Bun?.sleep??globalThis.process?.getBuiltinModule?.("timers/promises").setTimeout??((ms)=>new Promise((res)=>{setTimeout(res,ms)}));let sharedBuf=new Int32Array(new SharedArrayBuffer(4));export let sleepSync=globalThis.Bun?.sleepSync??((ms)=>{Atomics.wait(sharedBuf,0,0,ms)});export let sequential=async(n,task,...args)=>{for(let i=0;i<n;i++)await task(...args,i)};export let concurrent=(n,task,...args)=>{let arr=new Array(n);for(let i=0;i<n;i++)arr[i]=task(...args,i);return Promise.all(arr)};export let debounce=(f,ms)=>{let id;return(...a)=>{clearTimeout(id);id=setTimeout(f,ms,...a)}};export let rateLimit=(f,ms,limit)=>{let call=()=>{limit++};return(...a)=>{if(limit>0){limit--;try{f(...a)}finally{setTimeout(call,ms)}}}};export let throttle=(f,ms,limit)=>{let head=[null,null,null];let tail=head;let unlock=()=>{if(tail!==head){tail=tail[2];tail[0](f(...tail[1]));setTimeout(unlock,ms)}else limit++};return(...a)=>{if(limit===0){let r;let p=new Promise((res)=>{r=res});head=head[2]=[r,a,null];return p}limit--;setTimeout(unlock,ms);return f(...a)}};
+ export let pause=Promise.resolve();export let sleep=globalThis.Bun?.sleep??globalThis.process?.getBuiltinModule?.("timers/promises").setTimeout??((ms)=>new Promise((res)=>{setTimeout(res,ms)}));let sharedBuf=new Int32Array(new SharedArrayBuffer(4));export let sleepSync=globalThis.Bun?.sleepSync??((ms)=>{Atomics.wait(sharedBuf,0,0,ms)});export let sequential=async(n,task,...args)=>{for(let i=0;i<n;i++)await task(...args,i)};export let concurrent=(n,task,...args)=>{let arr=new Array(n);for(let i=0;i<n;i++)arr[i]=task(...args,i);return Promise.all(arr)};export let debounce=(f,ms)=>{let id;return(...a)=>{clearTimeout(id);id=setTimeout(f,ms,...a)}};export let rateLimit=(f,ms,limit)=>{let cur=limit;let unlock=()=>{cur=limit};return(...a)=>{if(cur>0){if(cur===1)setTimeout(unlock,ms);cur--;f(...a)}}};export let throttle=(f,ms,limit)=>{let head=[null];let tail=head;let cur=limit;let unlock=()=>{cur=limit;while(cur>0){if(tail===head)return;cur--;tail=tail[0];tail[1](f(...tail[2]))}setTimeout(unlock,ms)};return(...a)=>{if(cur===1){setTimeout(unlock,ms)}else if(cur===0){let r;let p=new Promise((res)=>{r=res});head=head[0]=[null,r,a];return p}cur--;return f(...a)}};
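For readability, here is a rough de-minified paraphrase of the rewritten `rateLimit` above, as I read the minified source; the variable names are mine and this is a sketch, not the shipped code:

```ts
const rateLimit = <Args extends any[]>(f: (...args: Args) => any, ms: number, limit: number) => {
  let remaining = limit;
  const unlock = () => { remaining = limit; };
  return (...args: Args) => {
    if (remaining > 0) {
      // The reset timer is armed when the last allowed call is consumed
      if (remaining === 1) setTimeout(unlock, ms);
      remaining--;
      f(...args);
    }
    // Calls beyond the limit are dropped until unlock restores the budget
  };
};
```
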
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "ciorent",
-   "version": "0.0.26",
+   "version": "0.1.1",
    "description": "A lightweight, low-overhead concurrency library",
    "homepage": "https://ciorent.netlify.app",
    "repository": {
@@ -18,13 +18,13 @@
    "main": "./index.js",
    "types": "./index.d.ts",
    "exports": {
-     "./fixed-queue": "./fixed-queue.js",
-     "./latch": "./latch.js",
      "./sliding-queue": "./sliding-queue.js",
-     ".": "./index.js",
      "./semaphore": "./semaphore.js",
      "./topic": "./topic.js",
+     ".": "./index.js",
+     "./latch": "./latch.js",
      "./channel": "./channel.js",
+     "./fixed-queue": "./fixed-queue.js",
      "./dropping-queue": "./dropping-queue.js",
      "./fiber": "./fiber.js"
    }
package/semaphore.js CHANGED
@@ -1 +1 @@
- import{pause as resolvedPromise}from"./index.js";export let init=(n)=>{let root=[null,null];return[n,root,root]};export let pause=(s)=>{s[0]--;if(s[0]<0){let r;let p=new Promise((res)=>{r=res});s[1]=s[1][1]=[r,null];return p}return resolvedPromise};export let signal=(s)=>{if(s[0]<0)(s[2]=s[2][1])[0]();s[0]++};export let wrap=(f)=>async(s,...a)=>{s[0]--;if(s[0]<0){let r;let p=new Promise((res)=>{r=res});s[1]=s[1][1]=[r,null];await p}try{return await f(...a)}finally{signal(s)}};
+ import{pause as resolvedPromise}from"./index.js";export let init=(n)=>{let root=[null];return[n,root,root]};export let pause=(s)=>{s[0]--;if(s[0]<0){let r;let p=new Promise((res)=>{r=res});s[1]=s[1][0]=[null,r];return p}return resolvedPromise};export let signal=(s)=>{if(s[0]<0)(s[2]=s[2][0])[1]();s[0]++};export let wrap=(f)=>async(s,...a)=>{s[0]--;if(s[0]<0){let r;let p=new Promise((res)=>{r=res});s[1]=s[1][0]=[null,r];await p}try{return await f(...a)}finally{signal(s)}};
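A rough readable paraphrase of the new semaphore internals (names and types are mine; the tuple holds a counter plus the write and read ends of a linked wait queue, and the real module reuses the shared resolved promise from index.js where this sketch uses `Promise.resolve()`):

```ts
type Waiter = [next: Waiter | null, resolve?: () => void];
type Semaphore = [count: number, writeEnd: Waiter, readEnd: Waiter];

const init = (n: number): Semaphore => {
  const root: Waiter = [null];
  return [n, root, root];
};

const pause = (s: Semaphore): Promise<void> => {
  s[0]--;
  if (s[0] < 0) {
    // No permit left: append a resolver and wait for a signal
    let resolve!: () => void;
    const p = new Promise<void>((res) => { resolve = res; });
    s[1] = s[1][0] = [null, resolve];
    return p;
  }
  return Promise.resolve();
};

const signal = (s: Semaphore): void => {
  // Wake the oldest waiter, if any, then return the permit
  if (s[0] < 0) (s[2] = s[2][0]!)[1]!();
  s[0]++;
};
```
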
package/topic.js CHANGED
@@ -1 +1 @@
- export let init=()=>[[null,null],[],[]];export let sub=(t)=>[t,t[0]];export let pub=(t,value)=>{let head=t[0]=t[0][1]=[value,null];for(let i=0,res=t[1],subs=t[2];i<res.length;i++){res[i](value);subs[i][1]=head}t[1]=[];t[2]=[]};export let flush=(t)=>{let head=t[0]=[null,null];for(let i=0,res=t[1],subs=t[2];i<res.length;i++){res[i]();subs[i][1]=head}t[1]=[];t[2]=[]};export let poll=(t)=>t[1][1]!==null?(t[1]=t[1][1])[0]:undefined;export let recieve=(t)=>{if(t[1][1]!==null)return Promise.resolve((t[1]=t[1][1])[0]);let topic=t[0];topic[2].push(t);return new Promise((res)=>{topic[1].push(res)})};
+ export let init=()=>[[null],[],[]];export let sub=(t)=>[t,t[0]];export let pub=(t,value)=>{let head=t[0]=t[0][0]=[null,value];for(let i=0,res=t[1],subs=t[2];i<res.length;i++){res[i](value);subs[i][1]=head}t[1]=[];t[2]=[]};export let flush=(t)=>{let head=t[0]=[null];for(let i=0,res=t[1],subs=t[2];i<res.length;i++){res[i]();subs[i][1]=head}t[1]=[];t[2]=[]};export let poll=(t)=>t[1][0]!==null?(t[1]=t[1][0])[1]:undefined;export let recieve=(t)=>{if(t[1][0]!==null)return Promise.resolve((t[1]=t[1][0])[1]);let topic=t[0];topic[2].push(t);return new Promise((res)=>{topic[1].push(res)})};