ciorent 0.1.0 → 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,40 +1,31 @@
  A lightweight, low-overhead concurrency library.
- ## Channel
- Channel is a synchronization primitive via message passing. A message may be sent over a channel, and another process is able to receive messages sent over a channel it has a reference to.
+ ## Semaphore
+ Semaphore is a concurrency primitive used to control access to a common resource by multiple processes.

  ```ts
- import * as channel from 'ciorent/channel';
+ import * as semaphore from 'ciorent/semaphore';
  import * as cio from 'ciorent';

- const c = channel.init<number>();
+ // Only allow 2 task to run concurrently
+ const sem = semaphore.init(2);

- const run = async () => {
-   for (let i = 0; i < 5; i++) {
-     await cio.sleep(100);
-     channel.send(c, i);
-     console.log('Sent', i);
-   }
+ const task = async (id: number) => {
+   // Acquire the semaphore or wait for the semaphore to be available
+   await semaphore.pause(sem);

-   // Resolve all waiting promises with `undefined`
-   // This is a way to tell the reciever to not listen to more data
-   channel.flush(c);
- };
+   console.log('Task', id, 'started');

- const log = async () => {
-   while (true) {
-     // Wait until a value is sent to the channel
-     const x = await channel.recieve(c);
-     if (x == null) break;
+   // Let the main thread schedules other tasks
+   for (let i = 1; i <= 5; i++) await cio.pause;

-     console.log('Recieved', x);
-   };
- }
+   console.log('Task', id, 'end');

- log();
- run();
+   // Release the semaphore
+   semaphore.signal(sem);
+ }

- // This runs first
- console.log('Starting...');
+ // Try to run 5 tasks concurrently
+ cio.concurrent(5, task);
  ```

  ## Fibers
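Aside on the rewritten semaphore example in the hunk above: it acquires and releases the slot by hand with `semaphore.pause`/`semaphore.signal`. This release also replaces `semaphore.wrap` with `semaphore.bind` (see the `semaphore.d.ts` hunk further down), so the same pattern can be written without the manual pair. A minimal sketch assuming ciorent 0.1.2 is installed; the task body is illustrative, not taken from the README:

```ts
import * as semaphore from 'ciorent/semaphore';
import * as cio from 'ciorent';

const sem = semaphore.init(2);

// bind() acquires the semaphore before the task runs and
// releases it when the task settles, even if it throws
const task = semaphore.bind(async (id: number) => {
  console.log('Task', id, 'started');
  await cio.sleep(100);
  console.log('Task', id, 'end');
}, sem);

// At most 2 of the 5 tasks run at any one time
cio.concurrent(5, task);
```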
@@ -76,42 +67,26 @@ Latch is a synchronization primitive that allows one process to wait until anoth

  ```ts
  import * as latch from 'ciorent/latch';
- import * as cio from 'ciorent';

- const fetchLatch = latch.init();
+ const startFetch = latch.init();

  const task = async () => {
    // Blocks until the latch is open
-   await latch.pause(fetchLatch);
+   await latch.pause(startFetch);

+   console.log('Start fetching...');
    const res = await fetch('http://example.com');
    console.log('Fetch status:', res.status);
  }

  const prepare = () => {
+   // This always run first
    console.log('Run before fetch:', performance.now().toFixed(2));
+   latch.open(startFetch);
  }

- const main = async () => {
-   const p = task();
-   await cio.sleep(500);
-   prepare();
-
-   // Allows all previously blocked tasks to run
-   latch.open(fetchLatch);
-
-   // Reclose the latch
-   // Tasks that aren't blocked yet will be blocked
-   latch.reset(fetchLatch);
-
-   return p;
- }
-
- // Run fetch after 500ms
- await main();
-
- // Run fetch after another 500ms
- await main();
+ task();
+ prepare();
  ```

  ## Pubsub
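The new latch example in the hunk above drops the `latch.reset` call the old one demonstrated. `reset` re-closes an opened latch so that later `pause` calls block again; a short sketch of that behaviour, reconstructed from the removed example (the `gate`/`waiter` names are illustrative):

```ts
import * as latch from 'ciorent/latch';
import * as cio from 'ciorent';

const gate = latch.init();

const waiter = async (id: number) => {
  // Blocks until the latch is opened
  await latch.pause(gate);
  console.log('Waiter', id, 'released');
};

const first = waiter(1);
latch.open(gate); // releases waiter 1
await first;

// Re-close the latch: tasks that pause() after this point block again
latch.reset(gate);

const second = waiter(2);
await cio.sleep(100);
latch.open(gate); // releases waiter 2
await second;
```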
@@ -150,29 +125,42 @@ cio.concurrent(3, async (id: number) => {
  publisher();
  ```

- ## Semaphore
- Semaphore is a concurrency primitive used to control access to a common resource by multiple processes.
+ ## Channel
+ Channel is a synchronization primitive via message passing. A message may be sent over a channel, and another process is able to receive messages sent over a channel it has a reference to.

  ```ts
- import * as semaphore from 'ciorent/semaphore';
+ import * as channel from 'ciorent/channel';
  import * as cio from 'ciorent';

- const task = semaphore.wrap(
-   async (task: number) => {
-     console.log('Task', task, 'started');
-
-     for (let i = 1; i <= 5; i++)
-       await cio.pause;
+ const c = channel.init<number>();

-     console.log('Task', task, 'end');
+ const run = async () => {
+   for (let i = 0; i < 5; i++) {
+     await cio.sleep(100);
+     channel.send(c, i);
+     console.log('Sent', i);
    }
- );

- // Only allow 2 task to run concurrently
- const sem = semaphore.init(2);
+   // Resolve all waiting promises with `undefined`
+   // This is a way to tell the reciever to not listen to more data
+   channel.flush(c);
+ };

- // Try to run 6 tasks concurrently
- cio.concurrent(6, (sem, id) => task(sem, id), sem);
+ const log = async () => {
+   while (true) {
+     // Wait until a value is sent to the channel
+     const x = await channel.recieve(c);
+     if (x == null) break;
+
+     console.log('Recieved', x);
+   };
+ }
+
+ log();
+ run();
+
+ // This runs first
+ console.log('Starting...');
  ```

  ## Utilities
@@ -185,9 +173,11 @@ import * as cio from 'ciorent';
  const task1 = async () => {
    let x = 0;

-   // Pause to let task2 to run
+   // Yield control back to the runtime, allowing it to
+   // schedule other tasks
    await cio.pause;

+   // Simulate heavy operation
    for (let i = 0; i < (Math.random() + 15) * 1e6; i++)
      x += Math.random() * 32 + i * Math.round(Math.random() * 16);

@@ -196,11 +186,8 @@ const task1 = async () => {

  // Short async task
  const task2 = async () => {
-   console.log('Fetch start', performance.now().toFixed(2) + 'ms');
-
-   // This will pause task2 to let task1 to continue running
+   console.log('Start fetching...');
    const txt = await fetch('http://example.com');
-
    console.log('Fetch status', txt.status);
  };
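The reworded comments in the two hunks above describe `cio.pause` as yielding control back to the runtime; per `index.js` further down it is simply a pre-resolved promise, so `await cio.pause` defers the rest of the function to the microtask queue. A sketch of the "await it occasionally" pattern that `index.d.ts` recommends for long synchronous loops (the chunk size is an arbitrary choice):

```ts
import * as cio from 'ciorent';

const sum = async (values: number[]) => {
  let total = 0;
  for (let i = 0; i < values.length; i++) {
    total += values[i];
    // Yield to the microtask queue every 100k iterations so other
    // pending async continuations get a chance to run
    if (i % 100_000 === 0) await cio.pause;
  }
  return total;
};
```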
 
@@ -214,13 +201,18 @@ Cross-runtime synchronous and asynchronous sleep functions.
  ```ts
  import * as cio from 'ciorent';

+ const logTime = (label: string) => console.log(label + ':', Math.floor(performance.now()) + 'ms');
+
+ logTime('Start');
+
+ // Non-blocking
  await cio.sleep(500);
- console.log('Hi');
+ logTime('After about 0.5s');

- // This blocks the current thread
- // On the browser this only works in workers
+ // This blocks the event loop
+ // On the browser this only works in workers and blocks the worker thread
  cio.sleepSync(500);
- console.log('Hi');
+ logTime('After another 0.5s');
  ```

  ### Spawning tasks
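On the updated comments in the sleep hunk above: `cio.sleep` schedules a timer, while `cio.sleepSync` really does block. In the published `index.js` (diffed further down) the non-Bun fallback blocks with `Atomics.wait`; roughly, deminified (not the literal source):

```ts
// Deminified reading of the sleepSync fallback in index.js
const sharedBuf = new Int32Array(new SharedArrayBuffer(4));

const sleepSync = (ms: number): void => {
  // Atomics.wait suspends the calling thread until the timeout elapses.
  // The main browser thread disallows Atomics.wait, which is why the
  // README restricts this to workers there.
  Atomics.wait(sharedBuf, 0, 0, ms);
};
```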
@@ -261,16 +253,16 @@ Limits the number of calls within a time window.
  ```ts
  import * as cio from 'ciorent';

- // Allow 1 call in 500ms
+ // Allow 2 calls in 500ms, other calls are dropped
  const fn = cio.rateLimit((id: number) => {
-   console.log('ID:', id);
- }, 500, 1);
+   console.log('Call ' + id + ':', Math.floor(performance.now()) + 'ms');
+ }, 500, 2);

- fn(1); // fn(1) gets executed
- await cio.sleep(100);
- fn(2); // fn(2) gets skipped
- await cio.sleep(500);
- fn(3); // fn(3) gets executed
+ // Some calls will be dropped
+ for (let i = 0; i < 8; i++) {
+   fn(i);
+   await cio.sleep(400);
+ }
  ```

  ### Throttle
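On the rewritten rate-limit example above: with the 0.1.2 implementation the window drops excess calls rather than queuing them, which the loop with `cio.sleep(400)` is meant to show. A smaller, timing-free sketch of the dropping behaviour (the `ping` name and numbers are illustrative):

```ts
import * as cio from 'ciorent';

// Allow at most 3 calls per 1s window; excess calls are dropped
const ping = cio.rateLimit((n: number) => console.log('ping', n), 1000, 3);

for (let i = 0; i < 5; i++) ping(i);
// Logs ping 0, 1 and 2 — calls 3 and 4 arrive while the window
// is exhausted and are silently discarded
```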
@@ -283,6 +275,6 @@ const fn = cio.throttle((id: number) => {
    console.log(id + ': ' + Math.floor(performance.now()) + 'ms');
  }, 500, 2);

- cio.concurrent(8, (id) => fn(id));
+ cio.concurrent(8, fn);
  ```
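Unlike `rateLimit`, `throttle` queues excess calls instead of dropping them, which is why the example above can hand `fn` straight to `cio.concurrent` and still run all eight calls. A sketch of that queuing behaviour under the new implementation (timings approximate):

```ts
import * as cio from 'ciorent';

const throttled = cio.throttle((id: number) => {
  console.log('run', id, Math.floor(performance.now()) + 'ms');
}, 500, 2);

// All 6 calls eventually run: 2 immediately, then 2 per ~500ms window
cio.concurrent(6, throttled);
```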
 
package/fiber.js CHANGED
@@ -1 +1 @@
- export let paused=(t)=>t[1]===0;export let running=(t)=>t[1]===1;export let done=(t)=>t[1]===2;let invoke=async(g,thread)=>{try{let t=g.next();while(!t.done){let v=await t.value;if(thread[1]===0){let r;let p=new Promise((res)=>{r=res});thread[2]=r;await p}if(thread[1]===2){thread[3].forEach(stop);return v}t=g.next(v)}thread[1]=2;return t.value}finally{thread[3].forEach(stop)}};export let fn=(f)=>f;export let spawn=(f,...args)=>{let thread=[null,1,null,[]];thread[0]=invoke(f(thread,...args),thread);return thread};export let pause=(t)=>{if(t[1]===1)t[1]=0};export let resume=(t)=>{if(t[1]===0){t[1]=1;t[2]?.()}};export let stop=(t)=>{if(t[1]===0){t[1]=2;t[2]?.()}else t[1]=2};export function*join(t){return yield t[1]}export let finish=(t)=>t[1];export let mount=(child,parent)=>{parent[3].push(child)};export let control=(t,signal)=>{signal.addEventListener("abort",()=>{stop(t)})};export function*unwrap(t){return yield t}
+ export let paused=(t)=>t[1]===0;export let running=(t)=>t[1]===1;export let done=(t)=>t[1]===2;let invoke=async(g,thread)=>{try{let t=g.next();while(!t.done){let v=await t.value;if(thread[1]===0){let r;let p=new Promise((res)=>{r=res});thread[2]=r;await p}if(thread[1]===2)return v;t=g.next(v)}thread[1]=2;return t.value}finally{thread[3].forEach(stop)}};export let fn=(f)=>f;export let spawn=(f,...args)=>{let thread=[null,1,null,[]];thread[0]=invoke(f(thread,...args),thread);return thread};export let pause=(t)=>{if(t[1]===1)t[1]=0};export let resume=(t)=>{if(t[1]===0){t[1]=1;t[2]?.()}};export let stop=(t)=>{if(t[1]===0){t[1]=2;t[2]?.()}else t[1]=2};export function*join(t){return yield t[1]}export let finish=(t)=>t[1];export let mount=(child,parent)=>{parent[3].push(child)};export let control=(t,signal)=>{signal.addEventListener("abort",()=>{stop(t)})};export function*unwrap(t){return yield t}
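The only change in fiber.js is the early-return path of the internal `invoke` loop: when a fiber is stopped while paused, 0.1.0 stopped its mounted children at the early return and then again in the `finally` block; 0.1.2 leaves that to the `finally` block alone. A deminified reading of the new loop, reconstructed from the minified line above (not the literal source):

```ts
// Thread layout: [result promise, status, resume callback, child threads]
// status: 0 = paused, 1 = running, 2 = done/stopped
type Thread = [Promise<unknown> | null, number, (() => void) | null, Thread[]];

const stop = (t: Thread): void => {
  if (t[1] === 0) { t[1] = 2; t[2]?.(); }
  else t[1] = 2;
};

const invoke = async (g: Generator, thread: Thread) => {
  try {
    let t = g.next();
    while (!t.done) {
      const v = await t.value;

      // Paused: park until resume() fires the stored callback
      if (thread[1] === 0) {
        let r!: () => void;
        const p = new Promise<void>((res) => { r = res; });
        thread[2] = r;
        await p;
      }

      // Stopped while suspended: bail out.
      // 0.1.0 also ran thread[3].forEach(stop) here, duplicating the
      // finally block below; 0.1.2 drops the duplicate.
      if (thread[1] === 2) return v;

      t = g.next(v);
    }
    thread[1] = 2;
    return t.value;
  } finally {
    // Stop any child fibers mounted onto this one
    thread[3].forEach(stop);
  }
};
```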
package/index.d.ts CHANGED
@@ -2,7 +2,7 @@
  * @module Other utilities
  */
  /**
- * Continue running the function on next microtask.
+ * Yield back to main thread.
  *
  * You can `await` this **occasionally** in an expensive synchronous operation to avoid
  *
@@ -10,13 +10,15 @@
  */
  export declare const pause: Promise<void>;
  /**
- * Sleep for a duration
+ * Sleep for a duration.
  * @param ms - Sleep duration in milliseconds
  */
  export declare const sleep: (ms: number) => Promise<void>;
  /**
  * Sleep for a duration synchronously.
  *
+ * This method blocks the current thread.
+ *
  * On the browser it only works in workers.
  * @param ms - Sleep duration in milliseconds
  */
package/index.js CHANGED
@@ -1 +1 @@
- export let pause=Promise.resolve();export let sleep=globalThis.Bun?.sleep??globalThis.process?.getBuiltinModule?.("timers/promises").setTimeout??((ms)=>new Promise((res)=>{setTimeout(res,ms)}));let sharedBuf=new Int32Array(new SharedArrayBuffer(4));export let sleepSync=globalThis.Bun?.sleepSync??((ms)=>{Atomics.wait(sharedBuf,0,0,ms)});export let sequential=async(n,task,...args)=>{for(let i=0;i<n;i++)await task(...args,i)};export let concurrent=(n,task,...args)=>{let arr=new Array(n);for(let i=0;i<n;i++)arr[i]=task(...args,i);return Promise.all(arr)};export let debounce=(f,ms)=>{let id;return(...a)=>{clearTimeout(id);id=setTimeout(f,ms,...a)}};export let rateLimit=(f,ms,limit)=>{let call=()=>{limit++};return(...a)=>{if(limit>0){limit--;f(...a);setTimeout(call,ms)}}};export let throttle=(f,ms,limit)=>{let head=[null];let tail=head;let unlock=()=>{if(tail!==head){tail=tail[0];tail[1](f(...tail[2]));setTimeout(unlock,ms)}else limit++};return(...a)=>{if(limit===0){let r;let p=new Promise((res)=>{r=res});head=head[0]=[null,r,a];return p}limit--;setTimeout(unlock,ms);return f(...a)}};
+ export let pause=Promise.resolve();export let sleep=globalThis.Bun?.sleep??globalThis.process?.getBuiltinModule?.("timers/promises").setTimeout??((ms)=>new Promise((res)=>{setTimeout(res,ms)}));let sharedBuf=new Int32Array(new SharedArrayBuffer(4));export let sleepSync=globalThis.Bun?.sleepSync??((ms)=>{Atomics.wait(sharedBuf,0,0,ms)});export let sequential=async(n,task,...args)=>{for(let i=0;i<n;i++)await task(...args,i)};export let concurrent=(n,task,...args)=>{let arr=new Array(n);for(let i=0;i<n;i++)arr[i]=task(...args,i);return Promise.all(arr)};export let debounce=(f,ms)=>{let id;return(...a)=>{clearTimeout(id);id=setTimeout(f,ms,...a)}};export let rateLimit=(f,ms,limit)=>{let cur=limit;let unlock=()=>{cur=limit};return(...a)=>{if(cur>0){if(cur===1)setTimeout(unlock,ms);cur--;f(...a)}}};export let throttle=(f,ms,limit)=>{let head=[null];let tail=head;let cur=limit;let unlock=()=>{cur=limit;while(cur>0){if(tail===head)return;cur--;tail=tail[0];tail[1](f(...tail[2]))}setTimeout(unlock,ms)};return(...a)=>{if(cur===1){setTimeout(unlock,ms)}else if(cur===0){let r;let p=new Promise((res)=>{r=res});head=head[0]=[null,r,a];return p}cur--;return f(...a)}};
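The behavioural change in index.js is confined to `rateLimit` and `throttle`: 0.1.0's `rateLimit` handed a slot back `ms` after each individual call, while 0.1.2 refills the whole window at once, and `throttle` now drains its queue in batches of `limit`. A deminified reading of the new `rateLimit`, reconstructed from the minified line above (types added, not the literal source):

```ts
const rateLimit = <Args extends unknown[]>(
  f: (...args: Args) => void,
  ms: number,
  limit: number
) => {
  let cur = limit;                        // calls left in the current window
  const unlock = () => { cur = limit; };  // refill the whole window at once

  return (...a: Args): void => {
    if (cur > 0) {
      // The refill timer starts when the last slot is taken
      if (cur === 1) setTimeout(unlock, ms);
      cur--;
      f(...a);
    }
    // cur === 0: the call is dropped
  };
};
```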
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "ciorent",
-   "version": "0.1.0",
+   "version": "0.1.2",
    "description": "A lightweight, low-overhead concurrency library",
    "homepage": "https://ciorent.netlify.app",
    "repository": {
@@ -20,12 +20,12 @@
    "exports": {
      "./fixed-queue": "./fixed-queue.js",
      "./sliding-queue": "./sliding-queue.js",
-     "./fiber": "./fiber.js",
-     "./latch": "./latch.js",
      "./dropping-queue": "./dropping-queue.js",
+     "./fiber": "./fiber.js",
+     "./channel": "./channel.js",
+     ".": "./index.js",
      "./topic": "./topic.js",
      "./semaphore": "./semaphore.js",
-     ".": "./index.js",
-     "./channel": "./channel.js"
+     "./latch": "./latch.js"
    }
  }
package/semaphore.d.ts CHANGED
@@ -32,6 +32,6 @@ export declare const pause: (s: Semaphore) => Promise<void>;
  */
  export declare const signal: (s: Semaphore) => void;
  /**
- * Wrap a task to bind to a custom semaphore later
+ * Bind a task to a semaphore
  */
- export declare const wrap: <Args extends any[], Return extends Promise<any>>(f: (...args: Args) => Return) => ((s: Semaphore, ...a: Args) => Return);
+ export declare const bind: <T extends (...args: any[]) => Promise<any>>(f: T, s: Semaphore) => T;
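The signature change means the semaphore is now fixed when the task is created instead of being passed on every call, and the returned function keeps the original parameter list. A migration sketch based on the two declarations above (the `work` task is illustrative):

```ts
import * as semaphore from 'ciorent/semaphore';

const sem = semaphore.init(2);
const work = async (id: number) => { console.log('working on', id); };

// 0.1.0: the semaphore was supplied at call time
// const limited = semaphore.wrap(work);
// await limited(sem, 1);

// 0.1.2: the semaphore is bound up front
const limited = semaphore.bind(work, sem);
await limited(1);
```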
package/semaphore.js CHANGED
@@ -1 +1 @@
- import{pause as resolvedPromise}from"./index.js";export let init=(n)=>{let root=[null];return[n,root,root]};export let pause=(s)=>{s[0]--;if(s[0]<0){let r;let p=new Promise((res)=>{r=res});s[1]=s[1][0]=[null,r];return p}return resolvedPromise};export let signal=(s)=>{if(s[0]<0)(s[2]=s[2][0])[1]();s[0]++};export let wrap=(f)=>async(s,...a)=>{s[0]--;if(s[0]<0){let r;let p=new Promise((res)=>{r=res});s[1]=s[1][0]=[null,r];await p}try{return await f(...a)}finally{signal(s)}};
+ import{pause as resolvedPromise}from"./index.js";export let init=(n)=>{let root=[null];return[n,root,root]};export let pause=(s)=>{s[0]--;if(s[0]<0){let r;let p=new Promise((res)=>{r=res});s[1]=s[1][0]=[null,r];return p}return resolvedPromise};export let signal=(s)=>{if(s[0]<0)(s[2]=s[2][0])[1]();s[0]++};export let bind=(f,s)=>async(...a)=>{s[0]--;if(s[0]<0){let r;let p=new Promise((res)=>{r=res});s[1]=s[1][0]=[null,r];await p}try{return await f(...a)}finally{if(s[0]<0)(s[2]=s[2][0])[1]();s[0]++}};
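For reference, a deminified reading of the new `bind` (reconstructed from the minified line above, not the literal source): it inlines `pause`'s acquire step before the task and `signal`'s release step in a `finally`, instead of going through a separate wrapper that receives the semaphore per call.

```ts
// Semaphore layout from init(): [available slots, write end of wait queue, read end of wait queue]
type Semaphore = [number, any, any];

const bind = (f: (...args: any[]) => Promise<any>, s: Semaphore) =>
  async (...a: any[]) => {
    // Acquire: take a slot, or append a resolver to the wait queue
    s[0]--;
    if (s[0] < 0) {
      let r!: () => void;
      const p = new Promise<void>((res) => { r = res; });
      s[1] = s[1][0] = [null, r];
      await p;
    }
    try {
      return await f(...a);
    } finally {
      // Release: wake the oldest waiter if any, then return the slot
      if (s[0] < 0) (s[2] = s[2][0])[1]();
      s[0]++;
    }
  };
```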