@effect/cluster 0.38.15 → 0.39.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/Entity.js +2 -0
- package/dist/cjs/Entity.js.map +1 -1
- package/dist/cjs/ShardManager.js +17 -28
- package/dist/cjs/ShardManager.js.map +1 -1
- package/dist/cjs/internal/shardManager.js +10 -17
- package/dist/cjs/internal/shardManager.js.map +1 -1
- package/dist/dts/Entity.d.ts +1 -0
- package/dist/dts/Entity.d.ts.map +1 -1
- package/dist/dts/ShardManager.d.ts +2 -2
- package/dist/dts/ShardManager.d.ts.map +1 -1
- package/dist/esm/Entity.js +2 -0
- package/dist/esm/Entity.js.map +1 -1
- package/dist/esm/ShardManager.js +18 -29
- package/dist/esm/ShardManager.js.map +1 -1
- package/dist/esm/internal/shardManager.js +9 -15
- package/dist/esm/internal/shardManager.js.map +1 -1
- package/package.json +6 -6
- package/src/Entity.ts +4 -0
- package/src/ShardManager.ts +36 -65
- package/src/internal/shardManager.ts +8 -23
package/src/ShardManager.ts
CHANGED
@@ -33,12 +33,7 @@ import * as Schema from "effect/Schema"
 import type { Scope } from "effect/Scope"
 import { RunnerNotRegistered } from "./ClusterError.js"
 import * as ClusterMetrics from "./ClusterMetrics.js"
-import {
-  addAllNested,
-  decideAssignmentsForUnassignedShards,
-  decideAssignmentsForUnbalancedShards,
-  State
-} from "./internal/shardManager.js"
+import { addAllNested, decideAssignmentsForShards, State } from "./internal/shardManager.js"
 import * as MachineId from "./MachineId.js"
 import { Runner } from "./Runner.js"
 import { RunnerAddress } from "./RunnerAddress.js"
@@ -74,7 +69,7 @@ export class ShardManager extends Context.Tag("@effect/cluster/ShardManager")<Sh
   /**
    * Rebalance shards assigned to runners within the cluster.
    */
-  readonly rebalance: (immediate: boolean) => Effect.Effect<void>
+  readonly rebalance: Effect.Effect<void>
   /**
    * Notify the cluster of an unhealthy runner.
    */
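For callers, this is the headline API change: `rebalance` is now a plain effect rather than a function of an `immediate` flag. A minimal before/after sketch (the surrounding program is hypothetical; only the interface shape comes from this diff):

```ts
import { Effect } from "effect"
import { ShardManager } from "@effect/cluster/ShardManager"

const program = Effect.gen(function*() {
  const manager = yield* ShardManager
  // 0.38.x: callers picked the rebalance mode themselves,
  // e.g. `yield* manager.rebalance(false)`
  // 0.39.0: a single effect; immediacy is decided internally
  yield* manager.rebalance
})
```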
@@ -130,7 +125,7 @@ export class Config extends Context.Tag("@effect/cluster/ShardManager/Config")<C
    * @since 1.0.0
    */
   static readonly defaults: Config["Type"] = {
-    rebalanceDebounce: Duration.
+    rebalanceDebounce: Duration.seconds(3),
     rebalanceInterval: Duration.seconds(20),
     rebalanceRetryInterval: Duration.seconds(10),
     rebalanceRate: 2 / 100,
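`Config` remains a `Context.Tag`, so the new three-second debounce is only a default. A sketch of overriding it (assuming the usual `Layer.succeed` pattern; `ConfigLive` is an illustrative name):

```ts
import { Duration, Layer } from "effect"
import * as ShardManager from "@effect/cluster/ShardManager"

// Keep every other default, shorten only the debounce.
const ConfigLive = Layer.succeed(ShardManager.Config, {
  ...ShardManager.Config.defaults,
  rebalanceDebounce: Duration.seconds(1)
})
```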
@@ -505,11 +500,8 @@ export const make = Effect.gen(function*() {
     state.addRunner(runner, clock.unsafeCurrentTimeMillis())
     updateRunnerMetrics()
     yield* PubSub.publish(events, ShardingEvent.RunnerRegistered({ address: runner.address }))
-
-    if (state.allUnassignedShards.length > 0) {
-      yield* rebalance(false)
-    }
     yield* Effect.forkIn(persistRunners, scope)
+    yield* Effect.forkIn(rebalance, scope)
     return MachineId.make(++machineId)
   })
 
@@ -532,52 +524,40 @@ export const make = Effect.gen(function*() {
     }
 
     yield* Effect.forkIn(persistRunners, scope)
-    yield* Effect.forkIn(rebalance
+    yield* Effect.forkIn(rebalance, scope)
   })
 
   let rebalancing = false
-  let nextRebalanceImmediate = false
   let rebalanceDeferred: Deferred.Deferred<void> | undefined
   const rebalanceFibers = yield* FiberSet.make()
 
-  const rebalance = (immediate: boolean): Effect.Effect<void> =>
-    Effect.withFiberRuntime<void>((fiber) => {
-      if (!rebalancing) {
-        rebalancing = true
-        return rebalanceLoop(immediate)
-      }
-      if (immediate) {
-        nextRebalanceImmediate = true
-      }
-      if (!rebalanceDeferred) {
-        rebalanceDeferred = Deferred.unsafeMake(fiber.id())
-      }
-      return Deferred.await(rebalanceDeferred)
-    })
-
-  const rebalanceLoop = (immediate?: boolean): Effect.Effect<void> =>
-    Effect.suspend(() => {
-      const deferred = rebalanceDeferred
-      rebalanceDeferred = undefined
-      if (!immediate) {
-        immediate = nextRebalanceImmediate
-        nextRebalanceImmediate = false
-      }
-      return runRebalance(immediate).pipe(
-        deferred ? Effect.intoDeferred(deferred) : identity,
-        Effect.onExit(() => {
-          if (!rebalanceDeferred) {
-            rebalancing = false
-            return Effect.void
-          }
-          return Effect.forkIn(rebalanceLoop(), scope)
-        })
-      )
-    })
+  const rebalance = Effect.withFiberRuntime<void>((fiber) => {
+    if (!rebalancing) {
+      rebalancing = true
+      return rebalanceLoop
+    }
+    if (!rebalanceDeferred) {
+      rebalanceDeferred = Deferred.unsafeMake(fiber.id())
+    }
+    return Deferred.await(rebalanceDeferred)
+  })
 
-  const runRebalance = (immediate: boolean) =>
-    Effect.gen(function*() {
+  const rebalanceLoop: Effect.Effect<void> = Effect.suspend(() => {
+    const deferred = rebalanceDeferred
+    rebalanceDeferred = undefined
+    return runRebalance.pipe(
+      deferred ? Effect.intoDeferred(deferred) : identity,
+      Effect.onExit(() => {
+        if (!rebalanceDeferred) {
+          rebalancing = false
+          return Effect.void
+        }
+        return Effect.forkIn(rebalanceLoop, scope)
+      })
+    )
+  })
 
+  const runRebalance = Effect.gen(function*() {
     yield* Effect.sleep(config.rebalanceDebounce)
 
     if (state.shards.size === 0) {
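The rewritten `rebalance`/`rebalanceLoop` pair is a request-coalescing gate: the first caller starts the loop, and callers arriving mid-run park on a shared `Deferred` that the next run completes, so at most one rebalance is ever in flight and every waiter observes a run that started after its request. The same pattern in isolation (this `makeCoalesced` helper is illustrative, not part of the package):

```ts
import { Deferred, Effect, Scope } from "effect"
import { identity } from "effect/Function"

const makeCoalesced = (work: Effect.Effect<void>, scope: Scope.Scope): Effect.Effect<void> => {
  let running = false
  let pending: Deferred.Deferred<void> | undefined

  const loop: Effect.Effect<void> = Effect.suspend(() => {
    const deferred = pending
    pending = undefined
    return work.pipe(
      // Wake the callers that were parked while the previous run was active.
      deferred ? Effect.intoDeferred(deferred) : identity,
      Effect.onExit(() => {
        if (!pending) {
          running = false
          return Effect.void
        }
        // New requests arrived mid-run: go around once more.
        return Effect.forkIn(loop, scope)
      })
    )
  })

  return Effect.withFiberRuntime((fiber) => {
    if (!running) {
      running = true
      return loop
    }
    if (pending === undefined) {
      pending = Deferred.unsafeMake(fiber.id())
    }
    return Deferred.await(pending)
  })
}
```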
@@ -590,10 +570,7 @@ export const make = Effect.gen(function*() {
     const unassignments = MutableHashMap.empty<RunnerAddress, MutableHashSet.MutableHashSet<ShardId>>()
     const changes = MutableHashSet.empty<RunnerAddress>()
     for (const group of state.shards.keys()) {
-      const [groupAssignments, groupUnassignments, groupChanges] =
-        immediate || (state.unassignedShards(group).length > 0)
-          ? decideAssignmentsForUnassignedShards(state, group)
-          : decideAssignmentsForUnbalancedShards(state, group, config.rebalanceRate)
+      const [groupAssignments, groupUnassignments, groupChanges] = decideAssignmentsForShards(state, group)
       for (const [address, shards] of groupAssignments) {
         addAllNested(assignments, address, Array.from(shards, (id) => makeShardId(group, id)))
       }
@@ -605,7 +582,7 @@ export const make = Effect.gen(function*() {
       }
     }
 
-    yield* Effect.logDebug(`Rebalancing shards
+    yield* Effect.logDebug(`Rebalancing shards`)
 
     if (MutableHashSet.size(changes) === 0) return
 
@@ -691,16 +668,16 @@ export const make = Effect.gen(function*() {
       yield* Effect.logWarning("Failed to rebalance runners: ", failedRunners)
     }
 
-    if (wereFailures && immediate) {
+    if (wereFailures) {
       // Try rebalancing again later if there were any failures
       yield* Clock.sleep(config.rebalanceRetryInterval).pipe(
-        Effect.zipRight(rebalance(immediate)),
+        Effect.zipRight(rebalance),
         Effect.forkIn(scope)
       )
     }
 
     yield* persistAssignments
-  })
+  }).pipe(Effect.withSpan("ShardManager.rebalance", { captureStackTrace: false }))
 
   const checkRunnerHealth: Effect.Effect<void> = Effect.suspend(() =>
     Effect.forEach(MutableHashMap.keys(state.allRunners), notifyUnhealthyRunner, {
@@ -720,14 +697,8 @@ export const make = Effect.gen(function*() {
 
     yield* Effect.forkIn(persistRunners, scope)
 
-    // Rebalance immediately if there are unassigned shards
-    yield* Effect.forkIn(
-      rebalance(state.allUnassignedShards.length > 0),
-      scope
-    )
-
     // Start a regular cluster rebalance at the configured interval
-    yield* rebalance(false).pipe(
+    yield* rebalance.pipe(
       Effect.andThen(Effect.sleep(config.rebalanceInterval)),
       Effect.forever,
       Effect.forkIn(scope)
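With the immediate path gone, startup forks a single scheduler fiber: run the coalescing `rebalance`, sleep, repeat. Because the sleep follows completion (`Effect.andThen` plus `Effect.forever`) rather than running on a fixed-rate schedule, a slow rebalance delays the next tick instead of letting runs pile up. The shape in isolation (`tick` is a stand-in for `rebalance`):

```ts
import { Duration, Effect } from "effect"

const tick = Effect.logDebug("rebalance tick")

// Run, then wait out the interval, then run again, forever.
const loop = tick.pipe(
  Effect.andThen(Effect.sleep(Duration.seconds(20))),
  Effect.forever
)
```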
package/src/internal/shardManager.ts
CHANGED
@@ -244,34 +244,26 @@ export interface RunnerWithMetadata {
 /** @internal */
 export const RunnerWithMetadata = (runner: RunnerWithMetadata): RunnerWithMetadata => runner
 
-/** @internal */
-export function decideAssignmentsForUnassignedShards(state: State, group: string): readonly [
-  assignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<number>>,
-  unassignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<number>>,
-  changes: MutableHashSet.MutableHashSet<RunnerAddress>
-] {
-  return pickNewRunners(state.unassignedShards(group), state, group, true, 1)
-}
-
 const allocationOrder: Order.Order<[number, number, number]> = Order.combine(
   Order.mapInput(Order.number, ([, shards]) => shards),
   Order.mapInput(Order.number, ([, , registeredAt]) => registeredAt)
 )
 
 /** @internal */
-export function decideAssignmentsForUnbalancedShards(state: State, group: string, rate: number): readonly [
+export function decideAssignmentsForShards(state: State, group: string): readonly [
   assignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<number>>,
   unassignments: MutableHashMap.MutableHashMap<RunnerAddress, Set<number>>,
   changes: MutableHashSet.MutableHashSet<RunnerAddress>
 ] {
   const shardsPerRunner = state.shardsPerRunner(group)
   const maxVersion = state.maxVersion
-  const extraShardsToAllocate = Arr.empty<[number, shardsInverse: number, registeredAt: number]>()
+  const shardsToRebalance = state.unassignedShards(group)
 
   const runnerGroup = state.runners.get(group)!
   const shardsGroup = state.shards.get(group)!
 
   if (state.allRunnersHaveVersion(maxVersion)) {
+    const extraShardsToAllocate = Arr.empty<[number, shardsInverse: number, registeredAt: number]>()
     const averageShardsPerRunner = state.averageShardsPerRunner(group)
     MutableHashMap.forEach(shardsPerRunner, (shards) => {
       // Count how many extra shards there are compared to the average
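`allocationOrder` ranks the per-runner surplus tuples ascending by their second element (`shardsInverse`, apparently a negated count so the most overloaded runners sort first), with ties broken by earliest registration. A small self-contained illustration with made-up tuples:

```ts
import { Order } from "effect"

type Candidate = [shardId: number, shardsInverse: number, registeredAt: number]

// Same shape as allocationOrder in the diff above.
const allocationOrder: Order.Order<Candidate> = Order.combine(
  Order.mapInput(Order.number, ([, shards]: Candidate) => shards),
  Order.mapInput(Order.number, ([, , registeredAt]: Candidate) => registeredAt)
)

const candidates: Array<Candidate> = [
  [7, -3, 100], // smaller surplus
  [2, -5, 200], // bigger surplus, registered later
  [4, -5, 50] // bigger surplus, registered earlier
]

// An Order is a compare function, so it plugs straight into sort:
// => [[4, -5, 50], [2, -5, 200], [7, -3, 100]]
candidates.sort(allocationOrder)
```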
@@ -296,19 +288,19 @@ export function decideAssignmentsForUnbalancedShards(state: State, group: string
         ])
       }
     })
+    extraShardsToAllocate.sort(allocationOrder)
+    for (let i = 0; i < extraShardsToAllocate.length; i++) {
+      shardsToRebalance.push(extraShardsToAllocate[i][0])
+    }
   }
 
-  const sortedShardsToRebalance = extraShardsToAllocate.sort(allocationOrder).map(([shard]) => shard)
-
-  return pickNewRunners(sortedShardsToRebalance, state, group, false, rate, shardsPerRunner, maxVersion)
+  return pickNewRunners(shardsToRebalance, state, group, shardsPerRunner, maxVersion)
 }
 
 function pickNewRunners(
   shardsToRebalance: ReadonlyArray<number>,
   state: State,
   group: string,
-  immediate: boolean,
-  rate: number,
   shardsPerRunner = state.shardsPerRunner(group),
   maybeMaxVersion = state.maxVersion
 ): readonly [
@@ -343,13 +335,6 @@
     // Do not assign to a runner that has unassignments in the same rebalance
     if (MutableHashMap.has(unassignments, address)) continue
 
-    // Do not assign too many shards to each runner unless rebalancing must
-    // occur immediately
-    if (!immediate) {
-      const assignmentCount = Option.getOrUndefined(MutableHashMap.get(addressAssignments, address))?.size ?? 0
-      if (assignmentCount >= shardsGroup.size * rate) continue
-    }
-
     if (candidate === undefined || shards.size < candidateShards!.size) {
       candidate = address
       candidateShards = shards