ciorent 0.1.3 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,81 +1,109 @@
  A lightweight, low-overhead concurrency library.
- ## Semaphore
- Semaphore is a concurrency primitive used to control access to a common resource by multiple processes.
+
+ ## Features
+ - Micro-optimized utilities.
+ - Performance-oriented API design.
+ - Small bundle size.
+ - Fully type-safe.
+
+ ## Examples
+ ### Fibers
+ Virtual threads with more controlled execution.

  ```ts
- import * as semaphore from 'ciorent/semaphore';
- import * as cio from 'ciorent';
+ import * as co from 'ciorent';
+ import * as fiber from 'ciorent/fiber';

- // Only allow 2 task to run concurrently
- const sem = semaphore.init(2);
+ const f1 = fiber.fn(function* () {
+ // Wait for a promise
+ yield co.sleep(1000);

- const task = async (id: number) => {
- // Acquire the semaphore or wait for the semaphore to be available
- await semaphore.pause(sem);
+ // Wait for a promise and return its result
+ const res = yield* fiber.unwrap(Promise.resolve(1));
+ console.log('Fiber 1 received:', res);

- console.log('Task', id, 'started');
+ return Math.random();
+ });

- // Let the main thread schedules other tasks
- for (let i = 1; i <= 5; i++) await cio.pause;
+ {
+ const main = fiber.spawn(function* (proc) {
+ // Start f1, wait for it to finish and get the result
+ const res = yield* fiber.join(fiber.spawn(f1));
+ console.log('Fiber 2 received:', res);

- console.log('Task', id, 'end');
+ // Start f1 and make its lifetime depend on the current fiber
+ const childProc = fiber.spawn(f1);
+ fiber.mount(childProc, proc);
+ });

- // Release the semaphore
- semaphore.signal(sem);
+ console.log('Fiber 2 started:', fiber.resumed(main));
+
+ // Pause the current fiber process
+ fiber.pause(main);
+ console.log('Fiber 2 is paused:', fiber.paused(main));
+
+ // Resume the fiber
+ fiber.resume(main);
+ console.log('Fiber 2 is resumed:', fiber.resumed(main));
+
+ // Wait for the fiber process to finish
+ await fiber.done(main);
+
+ // Check finish status
+ console.log('Fiber 2 completed', fiber.completed(main));
  }

- // Try to run 5 tasks concurrently
- cio.concurrent(5, task);
+ {
+ console.log('------------------------');
+
+ const main = fiber.spawn(f1);
+ console.log('Fiber 1 started:', fiber.resumed(main));
+
+ // Stop a fiber
+ fiber.stop(main);
+ console.log('Fiber 1 stopped:', fiber.stopped(main));
+ }
  ```

- ## Pubsub
- A fast, simple publish-subscribe API.
+ ### Latch
+ Latch is a synchronization primitive that allows one process to wait until another completes an operation before continuing execution.

  ```ts
- import * as topic from 'ciorent/topic';
- import * as cio from 'ciorent';
+ import * as latch from 'ciorent/latch';

- const messages = topic.init<number>();
+ const startFetch = latch.init();

- // A task that publish messages
- const publisher = async () => {
- for (let i = 0; i < 3; i++) {
- await cio.sleep(100);
- topic.pub(messages, i);
- }
+ const task = async () => {
+ // Blocks until the latch is open
+ await latch.pause(startFetch);

- // Resolve all waiting promises
- // And clear the value queue
- topic.flush(messages);
+ console.log('Start fetching...');
+ const res = await fetch('http://example.com');
+ console.log('Fetch status:', res.status);
  }

- // Spawn 3 tasks that recieve messages
- cio.concurrent(3, async (id: number) => {
- const sub = topic.sub(messages);
-
- while (true) {
- // Block until the value is sent
- const x = await topic.recieve(sub);
- if (x == null) break;
- console.log(`Task ${id} recieved: ${x}`);
- }
- });
+ const prepare = () => {
+ // This always runs first
+ console.log('Run before fetch:', performance.now().toFixed(2));
+ latch.open(startFetch);
+ }

- publisher();
+ task();
+ prepare();
  ```

- ## Channel
+ ### Channel
  Channel is a synchronization primitive via message passing. A message may be sent over a channel, and another process is able to receive messages sent over a channel it has a reference to.

  ```ts
  import * as channel from 'ciorent/channel';
- import * as cio from 'ciorent';
+ import * as co from 'ciorent';

  const c = channel.init<number>();

  const run = async () => {
  for (let i = 0; i < 5; i++) {
- await cio.sleep(100);
+ await co.sleep(100);
  channel.send(c, i);
  console.log('Sent', i);
  }
@@ -102,70 +130,76 @@ run();
  console.log('Starting...');
  ```

- ## Latch
- Latch is a synchronization primitive that allows one process to wait until another completes an operation before continuing execution.
+ ### Pubsub
+ A fast, simple publish-subscribe API.

  ```ts
- import * as latch from 'ciorent/latch';
+ import * as topic from 'ciorent/topic';
+ import * as co from 'ciorent';

- const startFetch = latch.init();
+ const messages = topic.init<number>();

- const task = async () => {
- // Blocks until the latch is open
- await latch.pause(startFetch);
+ // A task that publishes messages
+ const publisher = async () => {
+ for (let i = 0; i < 3; i++) {
+ await co.sleep(100);
+ topic.publish(messages, i);
+ }

- console.log('Start fetching...');
- const res = await fetch('http://example.com');
- console.log('Fetch status:', res.status);
+ // Resolve all waiting promises
+ // And clear the value queue
+ topic.flush(messages);
  }

- const prepare = () => {
- // This always run first
- console.log('Run before fetch:', performance.now().toFixed(2));
- latch.open(startFetch);
- }
+ // Spawn 3 tasks that receive messages
+ co.spawn(3, async (id: number) => {
+ const sub = topic.subscribe(messages);

- task();
- prepare();
+ while (true) {
+ // Block until the value is sent
+ const x = await topic.recieve(sub);
+ if (x == null) break;
+ console.log(`Task ${id} received: ${x}`);
+ }
+ });
+
+ publisher();
  ```

- ## Fibers
- Virtual threads with more controlled execution.
+ ### Semaphore
+ Semaphore is a concurrency primitive used to control access to a common resource by multiple processes.

  ```ts
- import * as cio from 'ciorent';
- import * as fiber from 'ciorent/fiber';
+ import * as semaphore from 'ciorent/semaphore';
+ import * as co from 'ciorent';

- const f1 = fiber.fn(function* () {
- console.log('Fiber 1 started');
+ // Only allow 2 tasks to run concurrently
+ const sem = semaphore.init(2);

- // Wait for a promise
- yield cio.sleep(1000);
+ const task = async (id: number) => {
+ // Acquire the semaphore or wait for the semaphore to be available
+ await semaphore.pause(sem);

- console.log('Fiber 1 done');
- return Math.random();
- });
+ console.log('Task', id, 'started');

- fiber.spawn(function* (proc) {
- console.log('Fiber 2 started');
+ // Let the main thread schedule other tasks
+ for (let i = 1; i <= 5; i++) await co.pause;

- // Start f1, wait for it to finish and get the result
- const res = yield* fiber.join(fiber.spawn(f1));
- console.log('Fiber 1 result:', res);
+ console.log('Task', id, 'end');

- // Start f1 and make its lifetime depends on current fiber
- fiber.mount(fiber.spawn(f1), proc);
+ // Release the semaphore
+ semaphore.signal(sem);
+ }

- // The runtime will interrupt f1
- console.log('Fiber 2 done');
- });
+ // Try to run 5 tasks concurrently
+ co.spawn(5, task);
  ```

- ## Utilities
- ### Pausing
+ ### Utilities
+ #### Pausing
  Delay the execution of a function for other asynchronous tasks to run.
  ```ts
- import * as cio from 'ciorent';
+ import * as co from 'ciorent';

  // Expensive sync task
  const task1 = async () => {
@@ -173,7 +207,7 @@ const task1 = async () => {

  // Yield control back to the runtime, allowing it to
  // schedule other tasks
- await cio.pause;
+ await co.pause;

  // Simulate heavy operation
  for (let i = 0; i < (Math.random() + 15) * 1e6; i++)
@@ -194,85 +228,68 @@ task1();
  task2();
  ```

- ### Sleep
+ #### Sleep
  Cross-runtime synchronous and asynchronous sleep functions.
  ```ts
- import * as cio from 'ciorent';
+ import * as co from 'ciorent';

  const logTime = (label: string) => console.log(label + ':', Math.floor(performance.now()) + 'ms');

  logTime('Start');

  // Non-blocking
- await cio.sleep(500);
+ await co.sleep(500);
  logTime('After about 0.5s');

  // This blocks the event loop
  // On the browser this only works in workers and blocks the worker thread
- cio.sleepSync(500);
+ co.sleepSync(500);
  logTime('After another 0.5s');
  ```

- ### Spawning tasks
+ #### Spawning tasks
  Utilities to create and run tasks.
  ```ts
- import * as cio from 'ciorent';
+ import * as co from 'ciorent';

  const task = async (id: number) => {
- await cio.sleep((10 - id) * 20 + 50);
+ await co.sleep((10 - id) * 20 + 50);
  console.log('Task', id, 'done');
  }

  // Spawn and run 5 tasks sequentially
  console.log('Running 5 tasks sequentially:');
- await cio.sequential(5, task);
+ await co.sequential(5, task);

  // Spawn and run 5 tasks concurrently
  console.log('Running 5 tasks concurrently:');
- await cio.concurrent(5, task);
+ await Promise.all(co.spawn(5, task));
  ```

- ### Debounce
+ #### Debounce
  Postpones execution until after an idle period.
  ```ts
- import * as cio from 'ciorent';
+ import * as co from 'ciorent';

- const fn = cio.debounce((id: number) => {
+ const fn = co.debounce((id: number) => {
  console.log('ID:', id);
  }, 500);

  fn(1); // fn(1) gets skipped
- await cio.sleep(100);
+ await co.sleep(100);
  fn(2); // fn(2) gets executed
  ```

- ### Rate Limit
- Limits the number of calls within a time window.
- ```ts
- import * as cio from 'ciorent';
-
- // Allow 2 calls in 500ms, other calls are dropped
- const fn = cio.rateLimit((id: number) => {
- console.log('Call ' + id + ':', Math.floor(performance.now()) + 'ms');
- }, 500, 2);
-
- // Some calls will be dropped
- for (let i = 0; i < 8; i++) {
- fn(i);
- await cio.sleep(400);
- }
- ```
-
- ### Throttle
+ #### Throttle
  Executes a function at a regular interval.
  ```ts
- import * as cio from 'ciorent';
+ import * as co from 'ciorent';

  // Allow 2 calls in 500ms
- const fn = cio.throttle((id: number) => {
+ const fn = co.throttle((id: number) => {
  console.log(id + ': ' + Math.floor(performance.now()) + 'ms');
  }, 500, 2);

- cio.concurrent(8, fn);
+ co.spawn(8, fn);
  ```

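Beyond the reshuffled sections, the README's task-spawning examples change API: 0.1.3's `cio.concurrent(n, task)` is replaced by `co.spawn(n, task)`, which (per the `index.d.ts` change further down) returns the array of task promises rather than a single promise of results. A minimal sketch of the difference, not part of the package README; the `task` body is illustrative only:

```ts
import * as co from 'ciorent';

// Illustrative task: sleeps briefly, then logs its id
const task = async (id: number) => {
  await co.sleep(100);
  console.log('Task', id, 'done');
};

// 0.1.3: `await cio.concurrent(5, task)` resolved once all tasks settled.
// 0.1.5: `spawn` only starts the tasks and hands back their promises,
// so waiting for completion is explicit:
await Promise.all(co.spawn(5, task));
```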
package/fiber.d.ts CHANGED
@@ -27,19 +27,19 @@ export interface Process<TReturn = unknown> {
  */
  export type Runtime = <const TReturn, const Args extends any[]>(gen: (proc: Process<TReturn>, ...args: Args) => Generator<any, TReturn>, ...args: Args) => Process<TReturn>;
  /**
- * Check whether the fiber is paused
+ * Check whether the fiber has been paused
  */
  export declare const paused: (t: Process) => boolean;
  /**
  * Check whether the fiber is running
  */
- export declare const running: (t: Process) => boolean;
+ export declare const resumed: (t: Process) => boolean;
  /**
- * Check whether the fiber is finished
+ * Check whether the fiber has completed
  */
- export declare const done: (t: Process) => boolean;
+ export declare const completed: (t: Process) => boolean;
  /**
- * Check whether the fiber is interrupted
+ * Check whether the fiber has been interrupted
  */
  export declare const stopped: (t: Process) => boolean;
  /**
@@ -76,7 +76,7 @@ export declare function join<T extends Process>(t: T): Generator<Awaited<T[0]>,
  * Wait for a fiber to finish and retrieve its result
  * @param t
  */
- export declare const finish: <T extends Process>(t: T) => T[3];
+ export declare const done: <T extends Process>(t: T) => T[0];
  /**
  * Mount child fiber lifetime to parent lifetime
  * @param child
package/fiber.js CHANGED
@@ -1 +1 @@
- export let paused=(t)=>t[1]===0;export let running=(t)=>t[1]===1;export let done=(t)=>t[1]===2;export let stopped=(t)=>t[1]===3;let invoke=async(g,thread)=>{try{let t=g.next();while(!t.done){let v=await t.value;if(thread[1]===0){let r;let p=new Promise((res)=>{r=res});thread[2]=r;await p}if(thread[1]===3)return;t=g.next(v)}thread[1]=2;return t.value}finally{thread[3].forEach(stop)}};export let fn=(f)=>f;export let spawn=(f,...args)=>{let thread=[null,1,null,[]];thread[0]=invoke(f(thread,...args),thread);return thread};export let pause=(t)=>{if(t[1]===1)t[1]=0};export let resume=(t)=>{if(t[1]===0){t[1]=1;t[2]?.()}};export let stop=(t)=>{if(t[1]===0)t[2]?.();t[1]=3};export function*join(t){return yield t[0]}export let finish=(t)=>t[3];export let mount=(child,parent)=>{parent[3].push(child)};export let control=(t,signal)=>{signal.addEventListener("abort",()=>{stop(t)})};export function*unwrap(t){return yield t}
+ export let paused=(t)=>t[1]===0;export let resumed=(t)=>t[1]===1;export let completed=(t)=>t[1]===2;export let stopped=(t)=>t[1]===3;let invoke=async(g,thread)=>{try{let t=g.next();while(!t.done){let v=await t.value;if(thread[1]===0){let r;let p=new Promise((res)=>{r=res});thread[2]=r;await p}if(thread[1]===3)return;t=g.next(v)}thread[1]=2;return t.value}finally{if(thread[1]!==2)thread[1]=3;thread[3].forEach(stop)}};export let fn=(f)=>f;export let spawn=(f,...args)=>{let thread=[null,1,null,[]];thread[0]=invoke(f(thread,...args),thread);return thread};export let pause=(t)=>{if(t[1]===1)t[1]=0};export let resume=(t)=>{if(t[1]===0){t[1]=1;t[2]?.()}};export let stop=(t)=>{if(t[1]!==2){if(t[1]===0)t[2]?.();t[1]=3}};export function*join(t){return yield t[0]}export let done=(t)=>t[0];export let mount=(child,parent)=>{parent[3].push(child)};export let control=(t,signal)=>{signal.addEventListener("abort",()=>{stop(t)})};export function*unwrap(t){return yield t}
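The fiber status helpers are renamed in 0.1.5: `running` becomes `resumed`, the `done` predicate becomes `completed`, and the old `finish` accessor is now called `done` and returns the fiber's result promise (`T[0]` rather than `T[3]`). A small usage sketch of the new names; not from the package, and the generator body is illustrative:

```ts
import * as co from 'ciorent';
import * as fiber from 'ciorent/fiber';

// Illustrative fiber: waits briefly, then returns a value
const proc = fiber.spawn(fiber.fn(function* () {
  yield co.sleep(10);
  return 'ok';
}));

console.log(fiber.resumed(proc)); // 0.1.3: fiber.running(proc)

// Await the fiber's result promise (0.1.3's `finish` was typed to return T[3])
const result = await fiber.done(proc);

console.log(fiber.completed(proc), result); // 0.1.3: fiber.done(proc) was this predicate
```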
package/index.d.ts CHANGED
@@ -30,24 +30,17 @@ export declare const sleepSync: (ms: number) => void;
  */
  export declare const sequential: <const T extends any[]>(n: number, task: (...args: [...T, id: number]) => Promise<any>, ...args: T) => Promise<void>;
  /**
- * Spawn n tasks that runs concurrently
+ * Spawn n concurrent tasks
  * @param n
  * @param task - The function to run
  */
- export declare const concurrent: <const T extends any[], const R>(n: number, task: (...args: [...T, id: number]) => Promise<R>, ...args: T) => Promise<R[]>;
+ export declare const spawn: <const T extends any[], const R>(n: number, task: (...args: [...T, id: number]) => Promise<R>, ...args: T) => Promise<R>[];
  /**
  * Drop function calls until it doesn't get called for a specific period.
  * @param f - The target function to debounce (it must not throw errors)
  * @param ms - The time period in milliseconds
  */
  export declare const debounce: <const Args extends any[]>(f: (...args: Args) => any, ms: number) => ((...args: Args) => void);
- /**
- * Drop function calls for a specific period
- * @param f - The target function to rate limit (it must not throw errors)
- * @param ms - The time period in milliseconds
- * @param limit - The call limit in the time period
- */
- export declare const rateLimit: <const Args extends any[]>(f: (...args: Args) => any, ms: number, limit: number) => ((...args: Args) => void);
  /**
  * Throttle function execution for a time period
  * @param f - The function to throttle (it must not throw errors)
package/index.js CHANGED
@@ -1 +1 @@
- export let pause=Promise.resolve();export let sleep=globalThis.Bun?.sleep??globalThis.process?.getBuiltinModule?.("timers/promises").setTimeout??((ms)=>new Promise((res)=>{setTimeout(res,ms)}));let sharedBuf=new Int32Array(new SharedArrayBuffer(4));export let sleepSync=globalThis.Bun?.sleepSync??((ms)=>{Atomics.wait(sharedBuf,0,0,ms)});export let sequential=async(n,task,...args)=>{for(let i=0;i<n;i++)await task(...args,i)};export let concurrent=(n,task,...args)=>{let arr=new Array(n);for(let i=0;i<n;i++)arr[i]=task(...args,i);return Promise.all(arr)};export let debounce=(f,ms)=>{let id;return(...a)=>{clearTimeout(id);id=setTimeout(f,ms,...a)}};export let rateLimit=(f,ms,limit)=>{let cur=limit;let unlock=()=>{cur=limit};return(...a)=>{if(cur>0){if(cur===1)setTimeout(unlock,ms);cur--;f(...a)}}};export let throttle=(f,ms,limit)=>{let head=[null];let tail=head;let cur=limit;let unlock=()=>{cur=limit;while(cur>0){if(tail===head)return;cur--;tail=tail[0];tail[1](f(...tail[2]))}setTimeout(unlock,ms)};return(...a)=>{if(cur===1){setTimeout(unlock,ms)}else if(cur===0){let r;let p=new Promise((res)=>{r=res});head=head[0]=[null,r,a];return p}cur--;return f(...a)}};
+ export let pause=Promise.resolve();export let sleep=globalThis.Bun?.sleep??globalThis.process?.getBuiltinModule?.("timers/promises").setTimeout??((ms)=>new Promise((res)=>{setTimeout(res,ms)}));let sharedBuf=new Int32Array(new SharedArrayBuffer(4));export let sleepSync=globalThis.Bun?.sleepSync??((ms)=>{Atomics.wait(sharedBuf,0,0,ms)});export let sequential=async(n,task,...args)=>{for(let i=0;i<n;i++)await task(...args,i)};export let spawn=(n,task,...args)=>{let arr=new Array(n);for(let i=0;i<n;i++)arr[i]=task(...args,i);return arr};export let debounce=(f,ms)=>{let id;return(...a)=>{clearTimeout(id);id=setTimeout(f,ms,...a)}};export let throttle=(f,ms,limit)=>{let head=[null];let tail=head;let cur=limit;let scheduled=false;let unlock=()=>{cur=limit;while(cur>0){if(tail===head){scheduled=false;return}cur--;tail=tail[0];tail[1](f(...tail[2]))}setTimeout(unlock,ms)};return(...a)=>{if(cur===0){let r;let p=new Promise((res)=>{r=res});head=head[0]=[null,r,a];return p}if(!scheduled){scheduled=true;setTimeout(unlock,ms)}cur--;return f(...a)}};
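A few things change in `index.js`: `concurrent` becomes `spawn` and now returns the task promises without wrapping them in `Promise.all`, `rateLimit` is removed, and `throttle` gains a `scheduled` flag so the unlock timer is armed only once per window rather than on the first call of each window. A de-minified reading of the new `throttle`, written out here for readability only; the variable names are mine, and the behavior is meant to match the one-liner above:

```ts
// Readable equivalent of the 0.1.5 `throttle` (names are illustrative).
export const throttle = <Args extends unknown[]>(
  f: (...args: Args) => unknown,
  ms: number,
  limit: number
) => {
  // Linked-list queue of calls deferred past the current window
  let head: any = [null];
  let tail: any = head;
  let remaining = limit;  // calls still allowed in this window
  let scheduled = false;  // whether an unlock timer is pending

  const unlock = () => {
    remaining = limit;
    while (remaining > 0) {
      if (tail === head) { // queue drained: stop rescheduling
        scheduled = false;
        return;
      }
      remaining--;
      tail = tail[0];
      tail[1](f(...tail[2])); // run a queued call and resolve its promise
    }
    setTimeout(unlock, ms);  // queue still has entries: roll the window
  };

  return (...args: Args) => {
    if (remaining === 0) {
      // Over the limit: queue the call and return a promise for its result
      let resolve!: (value: unknown) => void;
      const p = new Promise((res) => { resolve = res; });
      head = head[0] = [null, resolve, args];
      return p;
    }
    if (!scheduled) {
      scheduled = true;
      setTimeout(unlock, ms);
    }
    remaining--;
    return f(...args);
  };
};
```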
package/latch.d.ts CHANGED
@@ -18,6 +18,6 @@ export declare const pause: (latch: Latch) => Promise<void>;
  */
  export declare const open: (latch: Latch) => void;
  /**
- * Re-close a latch
+ * Close a latch
  */
- export declare const reset: (latch: Latch) => void;
+ export declare const close: (latch: Latch) => void;
package/latch.js CHANGED
@@ -1 +1 @@
- import{pause as endPromise}from"./index.js";export let init=()=>{let r;return[new Promise((res)=>{r=res}),r]};export let pause=(latch)=>latch[0];export let open=(latch)=>{latch[1]();latch[0]=endPromise};export let reset=(latch)=>{if(latch[0]===endPromise){let r;latch[0]=new Promise((res)=>{r=res});latch[1]=r}};
+ import{pause as endPromise}from"./index.js";export let init=()=>{let r;return[new Promise((res)=>{r=res}),r]};export let pause=(latch)=>latch[0];export let open=(latch)=>{latch[1]();latch[0]=endPromise};export let close=(latch)=>{if(latch[0]===endPromise){let r;latch[0]=new Promise((res)=>{r=res});latch[1]=r}};
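`latch.reset` is renamed to `latch.close`; re-arming an opened latch otherwise works as before. A short usage sketch of the renamed API; the `gate` and `waiter` names are illustrative:

```ts
import * as latch from 'ciorent/latch';

const gate = latch.init();

const waiter = async () => {
  // Blocks until the latch is opened
  await latch.pause(gate);
  console.log('gate opened');
};

waiter();
latch.open(gate);  // releases current waiters
latch.close(gate); // re-arms the latch; 0.1.3 called this `reset`
```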
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "ciorent",
- "version": "0.1.3",
+ "version": "0.1.5",
  "description": "A lightweight, low-overhead concurrency library",
  "homepage": "https://ciorent.netlify.app",
  "repository": {
@@ -18,14 +18,14 @@
  "main": "./index.js",
  "types": "./index.d.ts",
  "exports": {
- "./sliding-queue": "./sliding-queue.js",
  "./fixed-queue": "./fixed-queue.js",
+ "./sliding-queue": "./sliding-queue.js",
+ "./latch": "./latch.js",
+ "./channel": "./channel.js",
  "./fiber": "./fiber.js",
- ".": "./index.js",
- "./semaphore": "./semaphore.js",
  "./dropping-queue": "./dropping-queue.js",
+ ".": "./index.js",
  "./topic": "./topic.js",
- "./channel": "./channel.js",
- "./latch": "./latch.js"
+ "./semaphore": "./semaphore.js"
  }
  }
package/topic.d.ts CHANGED
@@ -34,12 +34,12 @@ export interface Subscriber<T extends {}> {
  * Subscribe to a topic
  * @param t
  */
- export declare const sub: <T extends {}>(t: Topic<T>) => Subscriber<T>;
+ export declare const subscribe: <T extends {}>(t: Topic<T>) => Subscriber<T>;
  /**
- * Subscribe to a topic
+ * Publish to a topic
  * @param t
  */
- export declare const pub: <T extends {}>(t: Topic<T>, value: T) => void;
+ export declare const publish: <T extends {}>(t: Topic<T>, value: T) => void;
  /**
  * Resolve all waiting promises and clear all pending values
  * @param t
package/topic.js CHANGED
@@ -1 +1 @@
- export let init=()=>[[null],[],[]];export let sub=(t)=>[t,t[0]];export let pub=(t,value)=>{let head=t[0]=t[0][0]=[null,value];for(let i=0,res=t[1],subs=t[2];i<res.length;i++){res[i](value);subs[i][1]=head}t[1]=[];t[2]=[]};export let flush=(t)=>{let head=t[0]=[null];for(let i=0,res=t[1],subs=t[2];i<res.length;i++){res[i]();subs[i][1]=head}t[1]=[];t[2]=[]};export let poll=(t)=>t[1][0]!==null?(t[1]=t[1][0])[1]:undefined;export let recieve=(t)=>{if(t[1][0]!==null)return Promise.resolve((t[1]=t[1][0])[1]);let topic=t[0];topic[2].push(t);return new Promise((res)=>{topic[1].push(res)})};
+ export let init=()=>[[null],[],[]];export let subscribe=(t)=>[t,t[0]];export let publish=(t,value)=>{let head=t[0]=t[0][0]=[null,value];for(let i=0,res=t[1],subs=t[2];i<res.length;i++){res[i](value);subs[i][1]=head}t[1]=[];t[2]=[]};export let flush=(t)=>{let head=t[0]=[null];for(let i=0,res=t[1],subs=t[2];i<res.length;i++){res[i]();subs[i][1]=head}t[1]=[];t[2]=[]};export let poll=(t)=>t[1][0]!==null?(t[1]=t[1][0])[1]:undefined;export let recieve=(t)=>{if(t[1][0]!==null)return Promise.resolve((t[1]=t[1][0])[1]);let topic=t[0];topic[2].push(t);return new Promise((res)=>{topic[1].push(res)})};
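`topic.sub` and `topic.pub` are renamed to `subscribe` and `publish`, while `recieve`, `poll`, and `flush` keep their 0.1.3 names (including the `recieve` spelling). A short usage sketch with the renamed functions; the values are illustrative:

```ts
import * as topic from 'ciorent/topic';

const numbers = topic.init<number>();

// 0.1.3: topic.sub / topic.pub
const sub = topic.subscribe(numbers);
topic.publish(numbers, 42);

// Unchanged API: receive a published value, then flush pending ones
const value = await topic.recieve(sub);
console.log(value); // 42
topic.flush(numbers);
```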