ciorent 0.1.0 → 0.1.1

This diff shows the content of publicly available package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
package/README.md CHANGED
@@ -1,42 +1,4 @@
  A lightweight, low-overhead concurrency library.
- ## Channel
- Channel is a synchronization primitive via message passing. A message may be sent over a channel, and another process is able to receive messages sent over a channel it has a reference to.
-
- ```ts
- import * as channel from 'ciorent/channel';
- import * as cio from 'ciorent';
-
- const c = channel.init<number>();
-
- const run = async () => {
-   for (let i = 0; i < 5; i++) {
-     await cio.sleep(100);
-     channel.send(c, i);
-     console.log('Sent', i);
-   }
-
-   // Resolve all waiting promises with `undefined`
-   // This is a way to tell the receiver not to listen for more data
-   channel.flush(c);
- };
-
- const log = async () => {
-   while (true) {
-     // Wait until a value is sent to the channel
-     const x = await channel.recieve(c);
-     if (x == null) break;
-
-     console.log('Received', x);
-   };
- }
-
- log();
- run();
-
- // This runs first
- console.log('Starting...');
- ```
-
  ## Fibers
  Virtual threads with more controlled execution.
 
@@ -76,42 +38,26 @@ Latch is a synchronization primitive that allows one process to wait until anoth
 
  ```ts
  import * as latch from 'ciorent/latch';
- import * as cio from 'ciorent';

- const fetchLatch = latch.init();
+ const startFetch = latch.init();

  const task = async () => {
    // Blocks until the latch is open
-   await latch.pause(fetchLatch);
+   await latch.pause(startFetch);

+   console.log('Start fetching...');
    const res = await fetch('http://example.com');
    console.log('Fetch status:', res.status);
  }

  const prepare = () => {
+   // This always runs first
    console.log('Run before fetch:', performance.now().toFixed(2));
+   latch.open(startFetch);
  }

- const main = async () => {
-   const p = task();
-   await cio.sleep(500);
-   prepare();
-
-   // Allows all previously blocked tasks to run
-   latch.open(fetchLatch);
-
-   // Reclose the latch
-   // Tasks that aren't blocked yet will be blocked
-   latch.reset(fetchLatch);
-
-   return p;
- }
-
- // Run fetch after 500ms
- await main();
-
- // Run fetch after another 500ms
- await main();
+ task();
+ prepare();
  ```

  ## Pubsub
@@ -150,6 +96,44 @@ cio.concurrent(3, async (id: number) => {
  publisher();
  ```

+ ## Channel
+ Channel is a synchronization primitive via message passing. A message may be sent over a channel, and another process is able to receive messages sent over a channel it has a reference to.
+
+ ```ts
+ import * as channel from 'ciorent/channel';
+ import * as cio from 'ciorent';
+
+ const c = channel.init<number>();
+
+ const run = async () => {
+   for (let i = 0; i < 5; i++) {
+     await cio.sleep(100);
+     channel.send(c, i);
+     console.log('Sent', i);
+   }
+
+   // Resolve all waiting promises with `undefined`
+   // This is a way to tell the receiver not to listen for more data
+   channel.flush(c);
+ };
+
+ const log = async () => {
+   while (true) {
+     // Wait until a value is sent to the channel
+     const x = await channel.recieve(c);
+     if (x == null) break;
+
+     console.log('Received', x);
+   };
+ }
+
+ log();
+ run();
+
+ // This runs first
+ console.log('Starting...');
+ ```
+
  ## Semaphore
  Semaphore is a concurrency primitive used to control access to a common resource by multiple processes.
 
@@ -157,22 +141,26 @@ Semaphore is a concurrency primitive used to control access to a common resource
  import * as semaphore from 'ciorent/semaphore';
  import * as cio from 'ciorent';

- const task = semaphore.wrap(
-   async (task: number) => {
-     console.log('Task', task, 'started');
+ // Only allow 2 tasks to run concurrently
+ const sem = semaphore.init(2);
+
+ const task = async (id: number) => {
+   // Acquire the semaphore or wait for the semaphore to be available
+   await semaphore.pause(sem);

-     for (let i = 1; i <= 5; i++)
-       await cio.pause;
+   console.log('Task', id, 'started');

-     console.log('Task', task, 'end');
-   }
- );
+   // Let the main thread schedule other tasks
+   for (let i = 1; i <= 5; i++) await cio.pause;

- // Only allow 2 tasks to run concurrently
- const sem = semaphore.init(2);
+   console.log('Task', id, 'end');
+
+   // Release the semaphore
+   semaphore.signal(sem);
+ }

- // Try to run 6 tasks concurrently
- cio.concurrent(6, (sem, id) => task(sem, id), sem);
+ // Try to run 5 tasks concurrently
+ cio.concurrent(5, task);
  ```

  ## Utilities
@@ -185,9 +173,11 @@ import * as cio from 'ciorent';
  const task1 = async () => {
    let x = 0;

-   // Pause to let task2 run
+   // Yield control back to the runtime, allowing it to
+   // schedule other tasks
    await cio.pause;

+   // Simulate a heavy operation
    for (let i = 0; i < (Math.random() + 15) * 1e6; i++)
      x += Math.random() * 32 + i * Math.round(Math.random() * 16);
 
@@ -196,11 +186,8 @@ const task1 = async () => {
 
  // Short async task
  const task2 = async () => {
-   console.log('Fetch start', performance.now().toFixed(2) + 'ms');
-
-   // This will pause task2 to let task1 continue running
+   console.log('Start fetching...');
    const txt = await fetch('http://example.com');
-
    console.log('Fetch status', txt.status);
  };
 
@@ -214,13 +201,18 @@ Cross-runtime synchronous and asynchronous sleep functions.
  ```ts
  import * as cio from 'ciorent';

+ const logTime = (label: string) => console.log(label + ':', Math.floor(performance.now()) + 'ms');
+
+ logTime('Start');
+
+ // Non-blocking
  await cio.sleep(500);
- console.log('Hi');
+ logTime('After about 0.5s');

- // This blocks the current thread
- // On the browser this only works in workers
+ // This blocks the event loop
+ // On the browser this only works in workers and blocks the worker thread
  cio.sleepSync(500);
- console.log('Hi');
+ logTime('After another 0.5s');
  ```

  ### Spawning tasks
@@ -263,7 +255,7 @@ import * as cio from 'ciorent';
 
  // Allow 1 call in 500ms
  const fn = cio.rateLimit((id: number) => {
-   console.log('ID:', id);
+   console.log('Call ' + id + ':', Math.floor(performance.now()) + 'ms');
  }, 500, 1);

  fn(1); // fn(1) gets executed
package/index.d.ts CHANGED
@@ -2,7 +2,7 @@
  * @module Other utilities
  */
  /**
- * Continue running the function on next microtask.
+ * Yield back to the main thread.
  *
  * You can `await` this **occasionally** in an expensive synchronous operation to avoid
  *
@@ -10,13 +10,15 @@
  */
  export declare const pause: Promise<void>;
  /**
- * Sleep for a duration
+ * Sleep for a duration.
  * @param ms - Sleep duration in milliseconds
  */
  export declare const sleep: (ms: number) => Promise<void>;
  /**
  * Sleep for a duration synchronously.
  *
+ * This method blocks the current thread.
+ *
  * On the browser it only works in workers.
  * @param ms - Sleep duration in milliseconds
  */
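The updated doc comments above describe `pause` as a yield point to be awaited occasionally inside expensive synchronous work. A minimal sketch of that usage, based only on the declarations in this file (the loop, chunk size, and `heavyWork` name are illustrative, not part of the package):

```ts
import * as cio from 'ciorent';

// Hypothetical CPU-bound task: yielding every 100k iterations lets the
// runtime schedule other queued tasks between chunks of work.
const heavyWork = async () => {
  let sum = 0;
  for (let i = 0; i < 1e7; i++) {
    sum += i;
    if (i % 1e5 === 0) await cio.pause;
  }
  return sum;
};
```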
package/index.js CHANGED
@@ -1 +1 @@
- export let pause=Promise.resolve();export let sleep=globalThis.Bun?.sleep??globalThis.process?.getBuiltinModule?.("timers/promises").setTimeout??((ms)=>new Promise((res)=>{setTimeout(res,ms)}));let sharedBuf=new Int32Array(new SharedArrayBuffer(4));export let sleepSync=globalThis.Bun?.sleepSync??((ms)=>{Atomics.wait(sharedBuf,0,0,ms)});export let sequential=async(n,task,...args)=>{for(let i=0;i<n;i++)await task(...args,i)};export let concurrent=(n,task,...args)=>{let arr=new Array(n);for(let i=0;i<n;i++)arr[i]=task(...args,i);return Promise.all(arr)};export let debounce=(f,ms)=>{let id;return(...a)=>{clearTimeout(id);id=setTimeout(f,ms,...a)}};export let rateLimit=(f,ms,limit)=>{let call=()=>{limit++};return(...a)=>{if(limit>0){limit--;f(...a);setTimeout(call,ms)}}};export let throttle=(f,ms,limit)=>{let head=[null];let tail=head;let unlock=()=>{if(tail!==head){tail=tail[0];tail[1](f(...tail[2]));setTimeout(unlock,ms)}else limit++};return(...a)=>{if(limit===0){let r;let p=new Promise((res)=>{r=res});head=head[0]=[null,r,a];return p}limit--;setTimeout(unlock,ms);return f(...a)}};
+ export let pause=Promise.resolve();export let sleep=globalThis.Bun?.sleep??globalThis.process?.getBuiltinModule?.("timers/promises").setTimeout??((ms)=>new Promise((res)=>{setTimeout(res,ms)}));let sharedBuf=new Int32Array(new SharedArrayBuffer(4));export let sleepSync=globalThis.Bun?.sleepSync??((ms)=>{Atomics.wait(sharedBuf,0,0,ms)});export let sequential=async(n,task,...args)=>{for(let i=0;i<n;i++)await task(...args,i)};export let concurrent=(n,task,...args)=>{let arr=new Array(n);for(let i=0;i<n;i++)arr[i]=task(...args,i);return Promise.all(arr)};export let debounce=(f,ms)=>{let id;return(...a)=>{clearTimeout(id);id=setTimeout(f,ms,...a)}};export let rateLimit=(f,ms,limit)=>{let cur=limit;let unlock=()=>{cur=limit};return(...a)=>{if(cur>0){if(cur===1)setTimeout(unlock,ms);cur--;f(...a)}}};export let throttle=(f,ms,limit)=>{let head=[null];let tail=head;let cur=limit;let unlock=()=>{cur=limit;while(cur>0){if(tail===head)return;cur--;tail=tail[0];tail[1](f(...tail[2]))}setTimeout(unlock,ms)};return(...a)=>{if(cur===1){setTimeout(unlock,ms)}else if(cur===0){let r;let p=new Promise((res)=>{r=res});head=head[0]=[null,r,a];return p}cur--;return f(...a)}};
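The only change in this file is the rewrite of `rateLimit` and `throttle`. A de-minified sketch of the new `rateLimit` may help; this is a reading of the minified source above, not the shipped code, and the type annotations are added for illustration:

```ts
const rateLimit = <Args extends unknown[]>(
  f: (...args: Args) => void,
  ms: number,
  limit: number
) => {
  let cur = limit;
  const unlock = () => { cur = limit; };

  return (...args: Args) => {
    if (cur > 0) {
      // A single reset timer is scheduled when the last slot is consumed,
      // replacing the one-timer-per-call restore used in 0.1.0.
      if (cur === 1) setTimeout(unlock, ms);
      cur--;
      f(...args);
    }
    // Calls arriving while cur === 0 are dropped.
  };
};
```

`throttle` moves in the same direction: rather than restoring one slot per timer tick, the new version refills the whole window in `unlock` and drains up to `limit` queued calls each time it fires.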
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "ciorent",
-   "version": "0.1.0",
+   "version": "0.1.1",
    "description": "A lightweight, low-overhead concurrency library",
    "homepage": "https://ciorent.netlify.app",
    "repository": {
@@ -18,14 +18,14 @@
    "main": "./index.js",
    "types": "./index.d.ts",
    "exports": {
-     "./fixed-queue": "./fixed-queue.js",
      "./sliding-queue": "./sliding-queue.js",
-     "./fiber": "./fiber.js",
-     "./latch": "./latch.js",
-     "./dropping-queue": "./dropping-queue.js",
-     "./topic": "./topic.js",
      "./semaphore": "./semaphore.js",
+     "./topic": "./topic.js",
      ".": "./index.js",
-     "./channel": "./channel.js"
+     "./latch": "./latch.js",
+     "./channel": "./channel.js",
+     "./fixed-queue": "./fixed-queue.js",
+     "./dropping-queue": "./dropping-queue.js",
+     "./fiber": "./fiber.js"
    }
  }