@effect/cluster 0.37.2 → 0.38.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ClusterCron/package.json +6 -0
- package/dist/cjs/ClusterCron.js +86 -0
- package/dist/cjs/ClusterCron.js.map +1 -0
- package/dist/cjs/ClusterSchema.js +9 -1
- package/dist/cjs/ClusterSchema.js.map +1 -1
- package/dist/cjs/ClusterWorkflowEngine.js +21 -6
- package/dist/cjs/ClusterWorkflowEngine.js.map +1 -1
- package/dist/cjs/Entity.js +6 -1
- package/dist/cjs/Entity.js.map +1 -1
- package/dist/cjs/EntityAddress.js +8 -1
- package/dist/cjs/EntityAddress.js.map +1 -1
- package/dist/cjs/MessageStorage.js +6 -4
- package/dist/cjs/MessageStorage.js.map +1 -1
- package/dist/cjs/Runner.js +15 -0
- package/dist/cjs/Runner.js.map +1 -1
- package/dist/cjs/RunnerAddress.js +8 -1
- package/dist/cjs/RunnerAddress.js.map +1 -1
- package/dist/cjs/Runners.js +5 -0
- package/dist/cjs/Runners.js.map +1 -1
- package/dist/cjs/ShardId.js +75 -7
- package/dist/cjs/ShardId.js.map +1 -1
- package/dist/cjs/ShardManager.js +63 -43
- package/dist/cjs/ShardManager.js.map +1 -1
- package/dist/cjs/ShardStorage.js +48 -35
- package/dist/cjs/ShardStorage.js.map +1 -1
- package/dist/cjs/Sharding.js +45 -37
- package/dist/cjs/Sharding.js.map +1 -1
- package/dist/cjs/ShardingConfig.js +9 -2
- package/dist/cjs/ShardingConfig.js.map +1 -1
- package/dist/cjs/Singleton.js +2 -2
- package/dist/cjs/Singleton.js.map +1 -1
- package/dist/cjs/SingletonAddress.js +2 -2
- package/dist/cjs/SingletonAddress.js.map +1 -1
- package/dist/cjs/SqlMessageStorage.js +32 -27
- package/dist/cjs/SqlMessageStorage.js.map +1 -1
- package/dist/cjs/SqlShardStorage.js +14 -14
- package/dist/cjs/SqlShardStorage.js.map +1 -1
- package/dist/cjs/index.js +3 -1
- package/dist/cjs/internal/entityManager.js +2 -1
- package/dist/cjs/internal/entityManager.js.map +1 -1
- package/dist/cjs/internal/shardManager.js +138 -37
- package/dist/cjs/internal/shardManager.js.map +1 -1
- package/dist/dts/ClusterCron.d.ts +37 -0
- package/dist/dts/ClusterCron.d.ts.map +1 -0
- package/dist/dts/ClusterSchema.d.ts +8 -0
- package/dist/dts/ClusterSchema.d.ts.map +1 -1
- package/dist/dts/ClusterWorkflowEngine.d.ts.map +1 -1
- package/dist/dts/Entity.d.ts +10 -0
- package/dist/dts/Entity.d.ts.map +1 -1
- package/dist/dts/EntityAddress.d.ts +9 -3
- package/dist/dts/EntityAddress.d.ts.map +1 -1
- package/dist/dts/MessageStorage.d.ts +3 -3
- package/dist/dts/MessageStorage.d.ts.map +1 -1
- package/dist/dts/Runner.d.ts +15 -0
- package/dist/dts/Runner.d.ts.map +1 -1
- package/dist/dts/RunnerAddress.d.ts +5 -0
- package/dist/dts/RunnerAddress.d.ts.map +1 -1
- package/dist/dts/Runners.d.ts.map +1 -1
- package/dist/dts/ShardId.d.ts +60 -6
- package/dist/dts/ShardId.d.ts.map +1 -1
- package/dist/dts/ShardManager.d.ts +13 -13
- package/dist/dts/ShardManager.d.ts.map +1 -1
- package/dist/dts/ShardStorage.d.ts +11 -14
- package/dist/dts/ShardStorage.d.ts.map +1 -1
- package/dist/dts/Sharding.d.ts +4 -2
- package/dist/dts/Sharding.d.ts.map +1 -1
- package/dist/dts/ShardingConfig.d.ts +32 -6
- package/dist/dts/ShardingConfig.d.ts.map +1 -1
- package/dist/dts/Singleton.d.ts +3 -1
- package/dist/dts/Singleton.d.ts.map +1 -1
- package/dist/dts/SingletonAddress.d.ts +4 -3
- package/dist/dts/SingletonAddress.d.ts.map +1 -1
- package/dist/dts/SqlMessageStorage.d.ts +3 -2
- package/dist/dts/SqlMessageStorage.d.ts.map +1 -1
- package/dist/dts/SqlShardStorage.d.ts +1 -1
- package/dist/dts/index.d.ts +4 -0
- package/dist/dts/index.d.ts.map +1 -1
- package/dist/esm/ClusterCron.js +77 -0
- package/dist/esm/ClusterCron.js.map +1 -0
- package/dist/esm/ClusterSchema.js +7 -0
- package/dist/esm/ClusterSchema.js.map +1 -1
- package/dist/esm/ClusterWorkflowEngine.js +21 -6
- package/dist/esm/ClusterWorkflowEngine.js.map +1 -1
- package/dist/esm/Entity.js +6 -1
- package/dist/esm/Entity.js.map +1 -1
- package/dist/esm/EntityAddress.js +8 -1
- package/dist/esm/EntityAddress.js.map +1 -1
- package/dist/esm/MessageStorage.js +6 -4
- package/dist/esm/MessageStorage.js.map +1 -1
- package/dist/esm/Runner.js +15 -0
- package/dist/esm/Runner.js.map +1 -1
- package/dist/esm/RunnerAddress.js +8 -1
- package/dist/esm/RunnerAddress.js.map +1 -1
- package/dist/esm/Runners.js +5 -0
- package/dist/esm/Runners.js.map +1 -1
- package/dist/esm/ShardId.js +73 -6
- package/dist/esm/ShardId.js.map +1 -1
- package/dist/esm/ShardManager.js +64 -45
- package/dist/esm/ShardManager.js.map +1 -1
- package/dist/esm/ShardStorage.js +47 -35
- package/dist/esm/ShardStorage.js.map +1 -1
- package/dist/esm/Sharding.js +45 -37
- package/dist/esm/Sharding.js.map +1 -1
- package/dist/esm/ShardingConfig.js +9 -2
- package/dist/esm/ShardingConfig.js.map +1 -1
- package/dist/esm/Singleton.js +2 -2
- package/dist/esm/Singleton.js.map +1 -1
- package/dist/esm/SingletonAddress.js +2 -2
- package/dist/esm/SingletonAddress.js.map +1 -1
- package/dist/esm/SqlMessageStorage.js +32 -27
- package/dist/esm/SqlMessageStorage.js.map +1 -1
- package/dist/esm/SqlShardStorage.js +14 -14
- package/dist/esm/SqlShardStorage.js.map +1 -1
- package/dist/esm/index.js +4 -0
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/internal/entityManager.js +2 -1
- package/dist/esm/internal/entityManager.js.map +1 -1
- package/dist/esm/internal/shardManager.js +136 -36
- package/dist/esm/internal/shardManager.js.map +1 -1
- package/package.json +12 -4
- package/src/ClusterCron.ts +129 -0
- package/src/ClusterSchema.ts +9 -0
- package/src/ClusterWorkflowEngine.ts +37 -6
- package/src/Entity.ts +20 -1
- package/src/EntityAddress.ts +11 -1
- package/src/MessageStorage.ts +12 -7
- package/src/Runner.ts +18 -0
- package/src/RunnerAddress.ts +9 -1
- package/src/Runners.ts +5 -0
- package/src/ShardId.ts +81 -11
- package/src/ShardManager.ts +74 -45
- package/src/ShardStorage.ts +57 -49
- package/src/Sharding.ts +45 -39
- package/src/ShardingConfig.ts +36 -7
- package/src/Singleton.ts +5 -2
- package/src/SingletonAddress.ts +2 -2
- package/src/SqlMessageStorage.ts +36 -30
- package/src/SqlShardStorage.ts +15 -15
- package/src/index.ts +5 -0
- package/src/internal/entityManager.ts +2 -1
- package/src/internal/shardManager.ts +158 -52
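The changes below center on shard groups: `ShardId` becomes a group-qualified value with a string encoding, the shard manager registers runners and rebalances shards per group, and a new `ClusterCron` module is added. A minimal sketch of the new `ShardId` surface, using only the constructors that appear in the diffs below (`make`, `fromString`, `toString`); the exact string encoding is internal:

```ts
import { make as makeShardId, ShardId } from "@effect/cluster/ShardId"

// ShardIds are now (group, id) pairs rather than bare numbers
const shard = makeShardId("default", 1)

// The storage layer round-trips ids through strings (see ShardStorage.ts below)
const decoded: ShardId = ShardId.fromString(shard.toString())
```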
package/src/ShardManager.ts
CHANGED
@@ -33,9 +33,9 @@ import type { Scope } from "effect/Scope"
 import { RunnerNotRegistered } from "./ClusterError.js"
 import * as ClusterMetrics from "./ClusterMetrics.js"
 import {
+  addAllNested,
   decideAssignmentsForUnassignedShards,
   decideAssignmentsForUnbalancedShards,
-  RunnerWithMetadata,
   State
 } from "./internal/shardManager.js"
 import * as MachineId from "./MachineId.js"
@@ -43,7 +43,7 @@ import { Runner } from "./Runner.js"
 import { RunnerAddress } from "./RunnerAddress.js"
 import { RunnerHealth } from "./RunnerHealth.js"
 import { RpcClientProtocol, Runners } from "./Runners.js"
-import { ShardId } from "./ShardId.js"
+import { make as makeShardId, ShardId } from "./ShardId.js"
 import { ShardingConfig } from "./ShardingConfig.js"
 import { ShardStorage } from "./ShardStorage.js"
 
@@ -56,7 +56,7 @@ export class ShardManager extends Context.Tag("@effect/cluster/ShardManager")<Sh
    * Get all shard assignments.
    */
   readonly getAssignments: Effect.Effect<
-
+    Iterable<readonly [ShardId, Option.Option<RunnerAddress>]>
   >
   /**
    * Get a stream of sharding events emit by the shard manager.
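`getAssignments` now yields an iterable of `[ShardId, Option<RunnerAddress>]` pairs rather than a concrete map. A consumption sketch (the `logUnassigned` program is illustrative, not part of the package):

```ts
import * as ShardManager from "@effect/cluster/ShardManager"
import { Effect, Option } from "effect"

const logUnassigned = Effect.gen(function*() {
  const manager = yield* ShardManager.ShardManager
  for (const [shardId, address] of yield* manager.getAssignments) {
    if (Option.isNone(address)) {
      yield* Effect.logWarning(`Shard ${shardId} is unassigned`)
    }
  }
})
```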
@@ -197,7 +197,7 @@ export const configFromEnv: Effect.Effect<Config["Type"], ConfigError> = configC
  * @since 1.0.0
  * @category Config
  */
-export const layerConfig = (config?: Partial<Config["Type"]>): Layer.Layer<Config> =>
+export const layerConfig = (config?: Partial<Config["Type"]> | undefined): Layer.Layer<Config> =>
   Layer.succeed(Config, {
     ...Config.defaults,
     ...config
@@ -207,7 +207,8 @@ export const layerConfig = (config?: Partial<Config["Type"]>): Layer.Layer<Confi
  * @since 1.0.0
  * @category Config
  */
-export const layerConfigFromEnv: Layer.Layer<Config, ConfigError>
+export const layerConfigFromEnv = (config?: Partial<Config["Type"]> | undefined): Layer.Layer<Config, ConfigError> =>
+  Layer.effect(Config, config ? Effect.map(configFromEnv, (env) => ({ ...env, ...config })) : configFromEnv)
 
 /**
  * Represents a client which can be used to communicate with the
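`layerConfigFromEnv` changes from a plain layer into a function, so environment-derived settings can be overridden programmatically, mirroring `layerConfig`. A usage sketch (the override value is illustrative; `rebalanceDebounce` is one of the `Config` fields used later in this file, assumed here to be a `Duration`):

```ts
import * as ShardManager from "@effect/cluster/ShardManager"
import { Duration } from "effect"

// Read the ShardManager config from the environment, then override one field
const ShardManagerConfig = ShardManager.layerConfigFromEnv({
  rebalanceDebounce: Duration.millis(500)
})
```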
@@ -221,7 +222,7 @@ export class ShardManagerClient
   /**
    * Register a new runner with the cluster.
    */
-  readonly register: (address: RunnerAddress) => Effect.Effect<MachineId.MachineId>
+  readonly register: (address: RunnerAddress, groups: ReadonlyArray<string>) => Effect.Effect<MachineId.MachineId>
   /**
    * Unregister a runner from the cluster.
    */
@@ -234,7 +235,7 @@ export class ShardManagerClient
    * Get all shard assignments.
    */
   readonly getAssignments: Effect.Effect<
-
+    Iterable<readonly [ShardId, Option.Option<RunnerAddress>]>
   >
   /**
    * Get a stream of sharding events emit by the shard manager.
@@ -287,7 +288,7 @@ export class Rpcs extends RpcGroup.make(
     payload: { address: RunnerAddress }
   }),
   Rpc.make("GetAssignments", {
-    success: Schema.
+    success: Schema.Array(Schema.Tuple(ShardId, Schema.Option(RunnerAddress)))
   }),
   Rpc.make("ShardingEvents", {
     success: ShardingEventSchema,
@@ -327,18 +328,26 @@ export const ShardingEvent = Data.taggedEnum<ShardingEvent>()
  * @category Client
  */
 export const makeClientLocal = Effect.gen(function*() {
-  const
+  const config = yield* ShardingConfig
   const clock = yield* Effect.clock
 
-  const
-
-    shards.set(ShardId.make(n), runnerAddress.runnerAddress)
-  }
+  const groups = new Set<string>()
+  const shards = MutableHashMap.empty<ShardId, Option.Option<RunnerAddress>>()
 
   let machineId = 0
 
   return ShardManagerClient.of({
-    register: (
+    register: (_, groupsToAdd) =>
+      Effect.sync(() => {
+        for (const group of groupsToAdd) {
+          if (groups.has(group)) continue
+          groups.add(group)
+          for (let n = 1; n <= config.shardsPerGroup; n++) {
+            MutableHashMap.set(shards, makeShardId(group, n), config.runnerAddress)
+          }
+        }
+        return MachineId.make(++machineId)
+      }),
     unregister: () => Effect.void,
     notifyUnhealthyRunner: () => Effect.void,
     getAssignments: Effect.succeed(shards),
@@ -367,7 +376,8 @@ export const makeClientRpc: Effect.Effect<
   })
 
   return ShardManagerClient.of({
-    register: (address
+    register: (address, groups) =>
+      client.Register({ runner: Runner.make({ address, version: config.serverVersion, groups }) }),
     unregister: (address) => client.Unregister({ address }),
     notifyUnhealthyRunner: (address) => client.NotifyUnhealthyRunner({ address }),
     getAssignments: client.GetAssignments(),
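Registration is now group-aware end to end: the client sends the groups alongside the address, and `Runner.make` carries them on the `Runner` value. A sketch of the new two-argument signature (the wrapper function is illustrative):

```ts
import type { RunnerAddress } from "@effect/cluster/RunnerAddress"
import * as ShardManager from "@effect/cluster/ShardManager"
import { Effect } from "effect"

// Register a runner for the shard groups it serves, yielding its MachineId
const registerRunner = (address: RunnerAddress, groups: ReadonlyArray<string>) =>
  Effect.gen(function*() {
    const client = yield* ShardManager.ShardManagerClient
    return yield* client.register(address, groups)
  })
```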
@@ -417,13 +427,13 @@ export const make = Effect.gen(function*() {
   const config = yield* Config
   const shardingConfig = yield* ShardingConfig
 
-  const state = yield* Effect.orDie(State.fromStorage(shardingConfig.
+  const state = yield* Effect.orDie(State.fromStorage(shardingConfig.shardsPerGroup))
   const scope = yield* Effect.scope
   const events = yield* PubSub.unbounded<ShardingEvent>()
 
-  yield* Metric.incrementBy(ClusterMetrics.runners, MutableHashMap.size(state.
+  yield* Metric.incrementBy(ClusterMetrics.runners, MutableHashMap.size(state.allRunners))
 
-  for (const address of state.
+  for (const [, address] of state.assignments) {
     const metric = Option.isSome(address) ?
       Metric.tagged(ClusterMetrics.assignedShards, "address", address.toString()) :
       ClusterMetrics.unassignedShards
@@ -443,17 +453,17 @@ export const make = Effect.gen(function*() {
   const persistRunners = Effect.unsafeMakeSemaphore(1).withPermits(1)(withRetry(
     Effect.suspend(() =>
       storage.saveRunners(
-        Iterable.map(state.
+        Iterable.map(state.allRunners, ([address, runner]) => [address, runner.runner])
       )
     )
   ))
 
   const persistAssignments = Effect.unsafeMakeSemaphore(1).withPermits(1)(withRetry(
-    Effect.suspend(() => storage.saveAssignments(state.
+    Effect.suspend(() => storage.saveAssignments(state.assignments))
   ))
 
   const notifyUnhealthyRunner = Effect.fnUntraced(function*(address: RunnerAddress) {
-    if (!MutableHashMap.has(state.
+    if (!MutableHashMap.has(state.allRunners, address)) return
 
     yield* Metric.increment(
       Metric.tagged(ClusterMetrics.runnerHealthChecked, "runner_address", address.toString())
@@ -470,28 +480,24 @@ export const make = Effect.gen(function*() {
     address: Option.Option<RunnerAddress>
   ): Effect.Effect<void, RunnerNotRegistered> {
     return Effect.suspend(() => {
-      if (Option.isSome(address) && !MutableHashMap.has(state.
+      if (Option.isSome(address) && !MutableHashMap.has(state.allRunners, address.value)) {
         return Effect.fail(new RunnerNotRegistered({ address: address.value }))
       }
-
-        if (!state.shards.has(shardId)) continue
-        state.shards.set(shardId, address)
-      }
+      state.addAssignments(shards, address)
       return Effect.void
     })
   }
 
-  const getAssignments = Effect.sync(() => state.
+  const getAssignments = Effect.sync(() => state.assignments)
 
   let machineId = 0
   const register = Effect.fnUntraced(function*(runner: Runner) {
     yield* Effect.logInfo(`Registering runner ${Runner.pretty(runner)}`)
-
-    MutableHashMap.set(state.runners, runner.address, RunnerWithMetadata({ runner, registeredAt: now }))
+    state.addRunner(runner, clock.unsafeCurrentTimeMillis())
 
     yield* Metric.increment(ClusterMetrics.runners)
     yield* PubSub.publish(events, ShardingEvent.RunnerRegistered({ address: runner.address }))
-    if (state.
+    if (state.allUnassignedShards.length > 0) {
       yield* rebalance(false)
     }
     yield* Effect.forkIn(persistRunners, scope)
@@ -499,18 +505,17 @@ export const make = Effect.gen(function*() {
   })
 
   const unregister = Effect.fnUntraced(function*(address: RunnerAddress) {
-    if (!MutableHashMap.has(state.
+    if (!MutableHashMap.has(state.allRunners, address)) return
 
     yield* Effect.logInfo("Unregistering runner at address:", address)
     const unassignments = Arr.empty<ShardId>()
-    for (const [shard, runner] of state.
+    for (const [shard, runner] of state.assignments) {
       if (Option.isSome(runner) && Equal.equals(runner.value, address)) {
         unassignments.push(shard)
-        state.shards.set(shard, Option.none())
       }
     }
-
-
+    state.addAssignments(unassignments, Option.none())
+    state.removeRunner(address)
     yield* Metric.incrementBy(ClusterMetrics.runners, -1)
 
     if (unassignments.length > 0) {
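`register` and `unregister` now delegate bookkeeping to the internal `State` (`addRunner`, `removeRunner`, `addAssignments`) instead of mutating maps inline. The implementation lives in `internal/shardManager.ts` and is not shown in this diff; inferred from the call sites in this file, its surface looks roughly like this (an inferred sketch, not the actual declaration):

```ts
import type { Runner } from "@effect/cluster/Runner"
import type { RunnerAddress } from "@effect/cluster/RunnerAddress"
import type { ShardId } from "@effect/cluster/ShardId"
import type * as MutableHashMap from "effect/MutableHashMap"
import type * as Option from "effect/Option"

interface StateSketch {
  // per-group shard topology, iterated group by group during rebalancing
  readonly shards: ReadonlyMap<string, unknown>
  // every [ShardId, assigned runner] pair across all groups
  readonly assignments: Iterable<readonly [ShardId, Option.Option<RunnerAddress>]>
  readonly allRunners: MutableHashMap.MutableHashMap<RunnerAddress, { readonly runner: Runner }>
  readonly allUnassignedShards: ReadonlyArray<ShardId>
  unassignedShards(group: string): ReadonlyArray<ShardId>
  addRunner(runner: Runner, registeredAt: number): void
  removeRunner(address: RunnerAddress): void
  addAssignments(shardIds: Iterable<ShardId>, address: Option.Option<RunnerAddress>): void
}
```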
@@ -570,10 +575,30 @@ export const make = Effect.gen(function*() {
 
     yield* Effect.sleep(config.rebalanceDebounce)
 
+    if (state.shards.size === 0) {
+      yield* Effect.logDebug("No shards to rebalance")
+      return
+    }
+
     // Determine which shards to assign and unassign
-    const
-
-
+    const assignments = MutableHashMap.empty<RunnerAddress, MutableHashSet.MutableHashSet<ShardId>>()
+    const unassignments = MutableHashMap.empty<RunnerAddress, MutableHashSet.MutableHashSet<ShardId>>()
+    const changes = MutableHashSet.empty<RunnerAddress>()
+    for (const group of state.shards.keys()) {
+      const [groupAssignments, groupUnassignments, groupChanges] =
+        immediate || (state.unassignedShards(group).length > 0)
+          ? decideAssignmentsForUnassignedShards(state, group)
+          : decideAssignmentsForUnbalancedShards(state, group, config.rebalanceRate)
+      for (const [address, shards] of groupAssignments) {
+        addAllNested(assignments, address, Array.from(shards, (id) => makeShardId(group, id)))
+      }
+      for (const [address, shards] of groupUnassignments) {
+        addAllNested(unassignments, address, Array.from(shards, (id) => makeShardId(group, id)))
+      }
+      for (const address of groupChanges) {
+        MutableHashSet.add(changes, address)
+      }
+    }
 
     yield* Effect.logDebug(`Rebalancing shards (immediate = ${immediate})`)
 
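Rebalancing now runs the assignment algorithm once per group and merges the per-group results into address-keyed sets via the new internal `addAllNested` helper, which this diff imports but does not show. A plausible sketch of what it does, assuming standard `effect` mutable collections:

```ts
import * as MutableHashMap from "effect/MutableHashMap"
import * as MutableHashSet from "effect/MutableHashSet"
import * as Option from "effect/Option"

// Merge values into the set stored under `key`, creating the set on first use
const addAllNested = <K, V>(
  map: MutableHashMap.MutableHashMap<K, MutableHashSet.MutableHashSet<V>>,
  key: K,
  values: ReadonlyArray<V>
): void => {
  const existing = MutableHashMap.get(map, key)
  const set = Option.isSome(existing) ? existing.value : MutableHashSet.empty<V>()
  for (const value of values) {
    MutableHashSet.add(set, value)
  }
  if (Option.isNone(existing)) {
    MutableHashMap.set(map, key, set)
  }
}
```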
@@ -615,7 +640,7 @@ export const make = Effect.gen(function*() {
           return Effect.void
         },
         onSuccess: () => {
-          const shardCount =
+          const shardCount = MutableHashSet.size(shards)
           return Metric.incrementBy(
             Metric.tagged(ClusterMetrics.assignedShards, "runner_address", address.toString()),
             -shardCount
@@ -635,9 +660,9 @@ export const make = Effect.gen(function*() {
     // Remove failed shard unassignments from the assignments
     MutableHashMap.forEach(assignments, (shards, address) => {
       for (const shard of failedUnassignments) {
-
+        MutableHashSet.remove(shards, shard)
       }
-      if (
+      if (MutableHashSet.size(shards) === 0) {
         MutableHashMap.remove(assignments, address)
       }
     })
@@ -653,7 +678,7 @@ export const make = Effect.gen(function*() {
           return Effect.void
         },
         onSuccess: () => {
-          const shardCount =
+          const shardCount = MutableHashSet.size(shards)
           return Metric.incrementBy(
             Metric.tagged(ClusterMetrics.assignedShards, "runner_address", address.toString()),
             -shardCount
@@ -691,7 +716,7 @@ export const make = Effect.gen(function*() {
   })
 
   const checkRunnerHealth: Effect.Effect<void> = Effect.suspend(() =>
-    Effect.forEach(MutableHashMap.keys(state.
+    Effect.forEach(MutableHashMap.keys(state.allRunners), notifyUnhealthyRunner, {
      concurrency: 10,
      discard: true
    })
@@ -710,7 +735,7 @@ export const make = Effect.gen(function*() {
 
   // Rebalance immediately if there are unassigned shards
   yield* Effect.forkIn(
-    rebalance(state.
+    rebalance(state.allUnassignedShards.length > 0),
     scope
   )
 
@@ -768,7 +793,11 @@ export const layerServerHandlers = Rpcs.toLayer(Effect.gen(function*() {
     Register: ({ runner }) => shardManager.register(runner),
     Unregister: ({ address }) => shardManager.unregister(address),
     NotifyUnhealthyRunner: ({ address }) => shardManager.notifyUnhealthyRunner(address),
-    GetAssignments: () =>
+    GetAssignments: () =>
+      Effect.map(
+        shardManager.getAssignments,
+        (assignments) => Array.from(assignments)
+      ),
     ShardingEvents: Effect.fnUntraced(function*() {
       const queue = yield* shardManager.shardingEvents
       const mailbox = yield* Mailbox.make<ShardingEvent>()
package/src/ShardStorage.ts
CHANGED
@@ -1,6 +1,7 @@
 /**
  * @since 1.0.0
  */
+import * as Arr from "effect/Array"
 import * as Context from "effect/Context"
 import * as Effect from "effect/Effect"
 import * as Layer from "effect/Layer"
@@ -22,7 +23,10 @@ export class ShardStorage extends Context.Tag("@effect/cluster/ShardStorage")<Sh
   /**
    * Get the current assignments of shards to runners.
    */
-  readonly getAssignments: Effect.Effect<
+  readonly getAssignments: Effect.Effect<
+    Array<[ShardId, Option.Option<RunnerAddress>]>,
+    PersistenceError
+  >
 
   /**
    * Save the current state of shards assignments to runners.
@@ -87,7 +91,7 @@ export interface Encoded {
   readonly getAssignments: Effect.Effect<
     Array<
       readonly [
-        shardId:
+        shardId: string,
         runnerAddress: string | null
       ]
     >,
@@ -98,7 +102,7 @@ export interface Encoded {
    * Save the current state of shards assignments to runners.
    */
   readonly saveAssignments: (
-    assignments: Array<readonly [shardId:
+    assignments: Array<readonly [shardId: string, RunnerAddress: string | null]>
   ) => Effect.Effect<void, PersistenceError>
 
   /**
@@ -119,8 +123,8 @@ export interface Encoded {
    */
   readonly acquire: (
     address: string,
-    shardIds: ReadonlyArray<
-  ) => Effect.Effect<Array<
+    shardIds: ReadonlyArray<string>
+  ) => Effect.Effect<Array<string>, PersistenceError>
 
   /**
    * Refresh the lock on the given shards, returning the shards that were
@@ -128,15 +132,15 @@ export interface Encoded {
    */
   readonly refresh: (
     address: string,
-    shardIds: ReadonlyArray<
-  ) => Effect.Effect<Array<
+    shardIds: ReadonlyArray<string>
+  ) => Effect.Effect<Array<string>, PersistenceError>
 
   /**
-   * Release the lock on the given
+   * Release the lock on the given shard.
    */
   readonly release: (
     address: string,
-
+    shardId: string
   ) => Effect.Effect<void, PersistenceError>
 
   /**
@@ -149,35 +153,38 @@ export interface Encoded {
  * @since 1.0.0
  * @category layers
  */
-export const makeEncoded =
-
-
-  return ShardStorage.of({
+export const makeEncoded = (encoded: Encoded) =>
+  ShardStorage.of({
     getAssignments: Effect.map(encoded.getAssignments, (assignments) => {
-      const
+      const arr = Arr.empty<[ShardId, Option.Option<RunnerAddress>]>()
       for (const [shardId, runnerAddress] of assignments) {
-
-          ShardId.
+        arr.push([
+          ShardId.fromString(shardId),
           runnerAddress === null ? Option.none() : Option.some(decodeRunnerAddress(runnerAddress))
-        )
+        ])
       }
-      return
+      return arr
     }),
-    saveAssignments: (assignments) =>
-
-
-
-      (
-
-
-
-      )
+    saveAssignments: (assignments) => {
+      const arr = Arr.empty<readonly [string, string | null]>()
+      for (const [shardId, runnerAddress] of assignments) {
+        arr.push([
+          shardId.toString(),
+          Option.isNone(runnerAddress) ? null : encodeRunnerAddress(runnerAddress.value)
+        ])
+      }
+      return encoded.saveAssignments(arr)
+    },
     getRunners: Effect.gen(function*() {
       const runners = yield* encoded.getRunners
-      const results
+      const results: Array<[RunnerAddress, Runner]> = []
       for (let i = 0; i < runners.length; i++) {
         const [address, runner] = runners[i]
-
+        try {
+          results.push([decodeRunnerAddress(address), Runner.decodeSync(runner)])
+        } catch {
+          //
+        }
       }
       return results
     }),
@@ -187,24 +194,25 @@ export const makeEncoded = Effect.fnUntraced(function*(encoded: Encoded) {
         Array.from(runners, ([address, runner]) => [encodeRunnerAddress(address), Runner.encodeSync(runner)])
       )
     ),
-    acquire: (address, shardIds) =>
-
-
-
-    >,
-    refresh: (address, shardIds) => encoded.refresh(encodeRunnerAddress(address), Array.from(shardIds)) as any,
-    release: Effect.fnUntraced(function*(address, shardId) {
-      activeShards.delete(shardId)
-      yield* encoded.release(encodeRunnerAddress(address), shardId).pipe(
-        Effect.onError(() => Effect.sync(() => activeShards.add(shardId)))
+    acquire: (address, shardIds) => {
+      const arr = Array.from(shardIds, (id) => id.toString())
+      return encoded.acquire(encodeRunnerAddress(address), arr).pipe(
+        Effect.map((shards) => shards.map(ShardId.fromString))
       )
-    }
-
-
-
-
+    },
+    refresh: (address, shardIds) => {
+      const arr = Array.from(shardIds, (id) => id.toString())
+      return encoded.refresh(encodeRunnerAddress(address), arr).pipe(
+        Effect.map((shards) => shards.map(ShardId.fromString))
+      )
+    },
+    release(address, shardId) {
+      return encoded.release(encodeRunnerAddress(address), shardId.toString())
+    },
+    releaseAll(address) {
+      return encoded.releaseAll(encodeRunnerAddress(address))
+    }
   })
-  })
 
 /**
  * @since 1.0.0
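`makeEncoded` is now a plain function rather than an effect, and the string-level `Encoded` contract round-trips `ShardId` through `toString`/`fromString`. A minimal in-memory backend to show the shape of the contract; it grants every lock request and skips runner persistence, so it is a sketch rather than a usable store:

```ts
import * as ShardStorage from "@effect/cluster/ShardStorage"
import { Effect } from "effect"

const assignments = new Map<string, string | null>()

const storage = ShardStorage.makeEncoded({
  getAssignments: Effect.sync(() => Array.from(assignments)),
  saveAssignments: (entries) =>
    Effect.sync(() => {
      for (const [shardId, runnerAddress] of entries) {
        assignments.set(shardId, runnerAddress)
      }
    }),
  getRunners: Effect.sync(() => []),
  saveRunners: () => Effect.void,
  // lock management: grant everything that is requested
  acquire: (_address, shardIds) => Effect.sync(() => Array.from(shardIds)),
  refresh: (_address, shardIds) => Effect.sync(() => Array.from(shardIds)),
  release: () => Effect.void,
  releaseAll: () => Effect.void
})
```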
@@ -215,7 +223,7 @@ export const layerNoop: Layer.Layer<ShardStorage> = Layer.sync(
   () => {
     let acquired: Array<ShardId> = []
     return ShardStorage.of({
-      getAssignments: Effect.
+      getAssignments: Effect.sync(() => []),
       saveAssignments: () => Effect.void,
       getRunners: Effect.sync(() => []),
       saveRunners: () => Effect.void,
@@ -235,13 +243,13 @@ export const layerNoop: Layer.Layer<ShardStorage> = Layer.sync(
  * @category constructors
  */
 export const makeMemory = Effect.gen(function*() {
-  const assignments =
+  const assignments = MutableHashMap.empty<ShardId, Option.Option<RunnerAddress>>()
   const runners = MutableHashMap.empty<RunnerAddress, Runner>()
 
   function saveAssignments(value: Iterable<readonly [ShardId, Option.Option<RunnerAddress>]>) {
     return Effect.sync(() => {
       for (const [shardId, runnerAddress] of value) {
-
+        MutableHashMap.set(assignments, shardId, runnerAddress)
       }
     })
   }
@@ -257,7 +265,7 @@ export const makeMemory = Effect.gen(function*() {
     let acquired: Array<ShardId> = []
 
     return ShardStorage.of({
-      getAssignments: Effect.sync(() =>
+      getAssignments: Effect.sync(() => Array.from(assignments)),
       saveAssignments,
       getRunners: Effect.sync(() => Array.from(runners)),
       saveRunners,