effect 3.12.5 → 3.12.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (105)
  1. package/dist/cjs/Array.js +11 -9
  2. package/dist/cjs/Array.js.map +1 -1
  3. package/dist/cjs/Cause.js +26 -2
  4. package/dist/cjs/Cause.js.map +1 -1
  5. package/dist/cjs/Cron.js +55 -23
  6. package/dist/cjs/Cron.js.map +1 -1
  7. package/dist/cjs/Duration.js.map +1 -1
  8. package/dist/cjs/Effect.js +32 -23
  9. package/dist/cjs/Effect.js.map +1 -1
  10. package/dist/cjs/FiberHandle.js +8 -8
  11. package/dist/cjs/FiberMap.js +8 -8
  12. package/dist/cjs/FiberSet.js +8 -8
  13. package/dist/cjs/LogLevel.js +30 -2
  14. package/dist/cjs/LogLevel.js.map +1 -1
  15. package/dist/cjs/ParseResult.js +36 -16
  16. package/dist/cjs/ParseResult.js.map +1 -1
  17. package/dist/cjs/Schema.js +40 -27
  18. package/dist/cjs/Schema.js.map +1 -1
  19. package/dist/cjs/internal/cause.js.map +1 -1
  20. package/dist/cjs/internal/core-effect.js +6 -5
  21. package/dist/cjs/internal/core-effect.js.map +1 -1
  22. package/dist/cjs/internal/core.js +6 -5
  23. package/dist/cjs/internal/core.js.map +1 -1
  24. package/dist/cjs/internal/groupBy.js +7 -7
  25. package/dist/cjs/internal/groupBy.js.map +1 -1
  26. package/dist/cjs/internal/rateLimiter.js +8 -7
  27. package/dist/cjs/internal/rateLimiter.js.map +1 -1
  28. package/dist/cjs/internal/runtime.js +7 -11
  29. package/dist/cjs/internal/runtime.js.map +1 -1
  30. package/dist/cjs/internal/stream.js +5 -5
  31. package/dist/cjs/internal/stream.js.map +1 -1
  32. package/dist/cjs/internal/version.js +1 -1
  33. package/dist/dts/Array.d.ts +62 -16
  34. package/dist/dts/Array.d.ts.map +1 -1
  35. package/dist/dts/Cause.d.ts +27 -3
  36. package/dist/dts/Cause.d.ts.map +1 -1
  37. package/dist/dts/Cron.d.ts +10 -3
  38. package/dist/dts/Cron.d.ts.map +1 -1
  39. package/dist/dts/Duration.d.ts +5 -5
  40. package/dist/dts/Duration.d.ts.map +1 -1
  41. package/dist/dts/Effect.d.ts +31 -22
  42. package/dist/dts/Effect.d.ts.map +1 -1
  43. package/dist/dts/FiberHandle.d.ts +8 -8
  44. package/dist/dts/FiberMap.d.ts +8 -8
  45. package/dist/dts/FiberSet.d.ts +8 -8
  46. package/dist/dts/LogLevel.d.ts +90 -6
  47. package/dist/dts/LogLevel.d.ts.map +1 -1
  48. package/dist/dts/ParseResult.d.ts.map +1 -1
  49. package/dist/dts/Schema.d.ts +19 -18
  50. package/dist/dts/Schema.d.ts.map +1 -1
  51. package/dist/dts/internal/core-effect.d.ts.map +1 -1
  52. package/dist/dts/internal/core.d.ts.map +1 -1
  53. package/dist/dts/internal/stream.d.ts.map +1 -1
  54. package/dist/esm/Array.js +11 -9
  55. package/dist/esm/Array.js.map +1 -1
  56. package/dist/esm/Cause.js +26 -2
  57. package/dist/esm/Cause.js.map +1 -1
  58. package/dist/esm/Cron.js +53 -22
  59. package/dist/esm/Cron.js.map +1 -1
  60. package/dist/esm/Duration.js.map +1 -1
  61. package/dist/esm/Effect.js +32 -23
  62. package/dist/esm/Effect.js.map +1 -1
  63. package/dist/esm/FiberHandle.js +8 -8
  64. package/dist/esm/FiberMap.js +8 -8
  65. package/dist/esm/FiberSet.js +8 -8
  66. package/dist/esm/LogLevel.js +30 -2
  67. package/dist/esm/LogLevel.js.map +1 -1
  68. package/dist/esm/ParseResult.js +36 -16
  69. package/dist/esm/ParseResult.js.map +1 -1
  70. package/dist/esm/Schema.js +39 -26
  71. package/dist/esm/Schema.js.map +1 -1
  72. package/dist/esm/internal/cause.js.map +1 -1
  73. package/dist/esm/internal/core-effect.js +6 -5
  74. package/dist/esm/internal/core-effect.js.map +1 -1
  75. package/dist/esm/internal/core.js +6 -5
  76. package/dist/esm/internal/core.js.map +1 -1
  77. package/dist/esm/internal/groupBy.js +7 -7
  78. package/dist/esm/internal/groupBy.js.map +1 -1
  79. package/dist/esm/internal/rateLimiter.js +8 -7
  80. package/dist/esm/internal/rateLimiter.js.map +1 -1
  81. package/dist/esm/internal/runtime.js +7 -11
  82. package/dist/esm/internal/runtime.js.map +1 -1
  83. package/dist/esm/internal/stream.js +5 -5
  84. package/dist/esm/internal/stream.js.map +1 -1
  85. package/dist/esm/internal/version.js +1 -1
  86. package/package.json +1 -1
  87. package/src/Array.ts +65 -19
  88. package/src/Cause.ts +27 -3
  89. package/src/Cron.ts +30 -27
  90. package/src/Duration.ts +11 -3
  91. package/src/Effect.ts +35 -23
  92. package/src/FiberHandle.ts +8 -8
  93. package/src/FiberMap.ts +8 -8
  94. package/src/FiberSet.ts +8 -8
  95. package/src/LogLevel.ts +90 -6
  96. package/src/ParseResult.ts +37 -26
  97. package/src/Schema.ts +45 -34
  98. package/src/internal/cause.ts +1 -1
  99. package/src/internal/core-effect.ts +16 -9
  100. package/src/internal/core.ts +9 -4
  101. package/src/internal/groupBy.ts +35 -39
  102. package/src/internal/rateLimiter.ts +8 -7
  103. package/src/internal/runtime.ts +6 -14
  104. package/src/internal/stream.ts +13 -15
  105. package/src/internal/version.ts +1 -1
package/src/LogLevel.ts CHANGED
@@ -171,24 +171,108 @@ export const None: LogLevel = core.logLevelNone
171
171
  export const allLevels = core.allLogLevels
172
172
 
173
173
  /**
174
- * Locally applies the specified `LogLevel` to an `Effect` workflow, reverting
175
- * to the previous `LogLevel` after the `Effect` workflow completes.
174
+ * Temporarily sets a `LogLevel` for an `Effect` workflow.
175
+ *
176
+ * **Details**
177
+ *
178
+ * This function allows you to apply a specific `LogLevel` locally to an
179
+ * `Effect` workflow. Once the workflow completes, the `LogLevel` reverts to its
180
+ * previous state.
181
+ *
182
+ * **When to Use**
183
+ *
184
+ * This is particularly useful when you want to adjust the verbosity of logging
185
+ * for specific parts of your program without affecting the global log level.
186
+ *
187
+ * @example
188
+ * ```ts
189
+ * import { Effect, LogLevel } from "effect"
190
+ *
191
+ * const program = Effect.gen(function*() {
192
+ * yield* Effect.log("message1")
193
+ * yield* Effect.gen(function*() {
194
+ * yield* Effect.log("message2")
195
+ * yield* Effect.log("message3")
196
+ * }).pipe(LogLevel.locally(LogLevel.Warning))
197
+ * })
198
+ *
199
+ * // Effect.runFork(program)
200
+ * // timestamp=... level=INFO fiber=#0 message=message1
201
+ * // timestamp=... level=WARN fiber=#0 message=message2
202
+ * // timestamp=... level=WARN fiber=#0 message=message3
203
+ * ```
176
204
  *
177
205
  * @since 2.0.0
178
206
  * @category utils
179
207
  */
180
208
  export const locally: {
181
209
  /**
182
- * Locally applies the specified `LogLevel` to an `Effect` workflow, reverting
183
- * to the previous `LogLevel` after the `Effect` workflow completes.
210
+ * Temporarily sets a `LogLevel` for an `Effect` workflow.
211
+ *
212
+ * **Details**
213
+ *
214
+ * This function allows you to apply a specific `LogLevel` locally to an
215
+ * `Effect` workflow. Once the workflow completes, the `LogLevel` reverts to its
216
+ * previous state.
217
+ *
218
+ * **When to Use**
219
+ *
220
+ * This is particularly useful when you want to adjust the verbosity of logging
221
+ * for specific parts of your program without affecting the global log level.
222
+ *
223
+ * @example
224
+ * ```ts
225
+ * import { Effect, LogLevel } from "effect"
226
+ *
227
+ * const program = Effect.gen(function*() {
228
+ * yield* Effect.log("message1")
229
+ * yield* Effect.gen(function*() {
230
+ * yield* Effect.log("message2")
231
+ * yield* Effect.log("message3")
232
+ * }).pipe(LogLevel.locally(LogLevel.Warning))
233
+ * })
234
+ *
235
+ * // Effect.runFork(program)
236
+ * // timestamp=... level=INFO fiber=#0 message=message1
237
+ * // timestamp=... level=WARN fiber=#0 message=message2
238
+ * // timestamp=... level=WARN fiber=#0 message=message3
239
+ * ```
184
240
  *
185
241
  * @since 2.0.0
186
242
  * @category utils
187
243
  */
188
244
  (self: LogLevel): <A, E, R>(use: Effect.Effect<A, E, R>) => Effect.Effect<A, E, R>
189
245
  /**
190
- * Locally applies the specified `LogLevel` to an `Effect` workflow, reverting
191
- * to the previous `LogLevel` after the `Effect` workflow completes.
246
+ * Temporarily sets a `LogLevel` for an `Effect` workflow.
247
+ *
248
+ * **Details**
249
+ *
250
+ * This function allows you to apply a specific `LogLevel` locally to an
251
+ * `Effect` workflow. Once the workflow completes, the `LogLevel` reverts to its
252
+ * previous state.
253
+ *
254
+ * **When to Use**
255
+ *
256
+ * This is particularly useful when you want to adjust the verbosity of logging
257
+ * for specific parts of your program without affecting the global log level.
258
+ *
259
+ * @example
260
+ * ```ts
261
+ * import { Effect, LogLevel } from "effect"
262
+ *
263
+ * const program = Effect.gen(function*() {
264
+ * yield* Effect.log("message1")
265
+ * yield* Effect.gen(function*() {
266
+ * yield* Effect.log("message2")
267
+ * yield* Effect.log("message3")
268
+ * }).pipe(LogLevel.locally(LogLevel.Warning))
269
+ * })
270
+ *
271
+ * // Effect.runFork(program)
272
+ * // timestamp=... level=INFO fiber=#0 message=message1
273
+ * // timestamp=... level=WARN fiber=#0 message=message2
274
+ * // timestamp=... level=WARN fiber=#0 message=message3
275
+ * ```
192
276
  *
193
277
  * @since 2.0.0
194
278
  * @category utils
package/src/ParseResult.ts CHANGED
@@ -1419,9 +1419,10 @@ const go = (ast: AST.AST, isDecoding: boolean): Parser => {
1419
1419
  case "Union": {
1420
1420
  const searchTree = getSearchTree(ast.types, isDecoding)
1421
1421
  const ownKeys = util_.ownKeys(searchTree.keys)
1422
- const len = ownKeys.length
1422
+ const ownKeysLen = ownKeys.length
1423
+ const astTypesLen = ast.types.length
1423
1424
  const map = new Map<any, Parser>()
1424
- for (let i = 0; i < ast.types.length; i++) {
1425
+ for (let i = 0; i < astTypesLen; i++) {
1425
1426
  map.set(ast.types[i], goMemo(ast.types[i], isDecoding))
1426
1427
  }
1427
1428
  const concurrency = getConcurrency(ast) ?? 1
@@ -1430,9 +1431,9 @@ const go = (ast: AST.AST, isDecoding: boolean): Parser => {
1430
1431
  const es: Array<[number, ParseIssue]> = []
1431
1432
  let stepKey = 0
1432
1433
  let candidates: Array<AST.AST> = []
1433
- if (len > 0) {
1434
+ if (ownKeysLen > 0) {
1434
1435
  if (Predicate.isRecordOrArray(input)) {
1435
- for (let i = 0; i < len; i++) {
1436
+ for (let i = 0; i < ownKeysLen; i++) {
1436
1437
  const name = ownKeys[i]
1437
1438
  const buckets = searchTree.keys[name].buckets
1438
1439
  // for each property that should contain a literal, check if the input contains that property
@@ -1443,33 +1444,33 @@ const go = (ast: AST.AST, isDecoding: boolean): Parser => {
1443
1444
  // retrieve the minimal set of candidates for decoding
1444
1445
  candidates = candidates.concat(buckets[literal])
1445
1446
  } else {
1446
- const literals = AST.Union.make(searchTree.keys[name].literals)
1447
+ const { candidates, literals } = searchTree.keys[name]
1448
+ const literalsUnion = AST.Union.make(literals)
1449
+ const errorAst = candidates.length === astTypesLen
1450
+ ? new AST.TypeLiteral([new AST.PropertySignature(name, literalsUnion, false, true)], [])
1451
+ : AST.Union.make(candidates)
1447
1452
  es.push([
1448
1453
  stepKey++,
1449
- new Composite(
1450
- new AST.TypeLiteral([
1451
- new AST.PropertySignature(name, literals, false, true)
1452
- ], []),
1453
- input,
1454
- new Pointer(name, input, new Type(literals, input[name]))
1455
- )
1454
+ new Composite(errorAst, input, new Pointer(name, input, new Type(literalsUnion, input[name])))
1456
1455
  ])
1457
1456
  }
1458
1457
  } else {
1459
- const literals = AST.Union.make(searchTree.keys[name].literals)
1460
- const fakeps = new AST.PropertySignature(name, literals, false, true)
1458
+ const { candidates, literals } = searchTree.keys[name]
1459
+ const fakePropertySignature = new AST.PropertySignature(name, AST.Union.make(literals), false, true)
1460
+ const errorAst = candidates.length === astTypesLen
1461
+ ? new AST.TypeLiteral([fakePropertySignature], [])
1462
+ : AST.Union.make(candidates)
1461
1463
  es.push([
1462
1464
  stepKey++,
1463
- new Composite(
1464
- new AST.TypeLiteral([fakeps], []),
1465
- input,
1466
- new Pointer(name, input, new Missing(fakeps))
1467
- )
1465
+ new Composite(errorAst, input, new Pointer(name, input, new Missing(fakePropertySignature)))
1468
1466
  ])
1469
1467
  }
1470
1468
  }
1471
1469
  } else {
1472
- es.push([stepKey++, new Type(ast, input)])
1470
+ const errorAst = searchTree.candidates.length === astTypesLen
1471
+ ? ast
1472
+ : AST.Union.make(searchTree.candidates)
1473
+ es.push([stepKey++, new Type(errorAst, input)])
1473
1474
  }
1474
1475
  }
1475
1476
  if (searchTree.otherwise.length > 0) {
@@ -1608,16 +1609,19 @@ export const getLiterals = (
1608
1609
  }
1609
1610
 
1610
1611
  /**
1611
- * The purpose of the algorithm is to narrow down the pool of possible candidates for decoding as much as possible.
1612
+ * The purpose of the algorithm is to narrow down the pool of possible
1613
+ * candidates for decoding as much as possible.
1612
1614
  *
1613
1615
  * This function separates the schemas into two groups, `keys` and `otherwise`:
1614
1616
  *
1615
1617
  * - `keys`: the schema has at least one property with a literal value
1616
1618
  * - `otherwise`: the schema has no properties with a literal value
1617
1619
  *
1618
- * If a schema has at least one property with a literal value, so it ends up in `keys`, first a namespace is created for
1619
- * the name of the property containing the literal, and then within this namespace a "bucket" is created for the literal
1620
- * value in which to store all the schemas that have the same property and literal value.
1620
+ * If a schema has at least one property with a literal value, so it ends up in
1621
+ * `keys`, first a namespace is created for the name of the property containing
1622
+ * the literal, and then within this namespace a "bucket" is created for the
1623
+ * literal value in which to store all the schemas that have the same property
1624
+ * and literal value.
1621
1625
  *
1622
1626
  * @internal
1623
1627
  */
@@ -1629,25 +1633,30 @@ export const getSearchTree = (
1629
1633
  readonly [key: PropertyKey]: {
1630
1634
  buckets: { [literal: string]: ReadonlyArray<AST.AST> }
1631
1635
  literals: ReadonlyArray<AST.Literal> // this is for error messages
1636
+ candidates: ReadonlyArray<AST.AST>
1632
1637
  }
1633
1638
  }
1634
1639
  otherwise: ReadonlyArray<AST.AST>
1640
+ candidates: ReadonlyArray<AST.AST>
1635
1641
  } => {
1636
1642
  const keys: {
1637
1643
  [key: PropertyKey]: {
1638
1644
  buckets: { [literal: string]: Array<AST.AST> }
1639
1645
  literals: Array<AST.Literal>
1646
+ candidates: Array<AST.AST>
1640
1647
  }
1641
1648
  } = {}
1642
1649
  const otherwise: Array<AST.AST> = []
1650
+ const candidates: Array<AST.AST> = []
1643
1651
  for (let i = 0; i < members.length; i++) {
1644
1652
  const member = members[i]
1645
1653
  const tags = getLiterals(member, isDecoding)
1646
1654
  if (tags.length > 0) {
1655
+ candidates.push(member)
1647
1656
  for (let j = 0; j < tags.length; j++) {
1648
1657
  const [key, literal] = tags[j]
1649
1658
  const hash = String(literal.literal)
1650
- keys[key] = keys[key] || { buckets: {}, literals: [] }
1659
+ keys[key] = keys[key] || { buckets: {}, literals: [], candidates: [] }
1651
1660
  const buckets = keys[key].buckets
1652
1661
  if (Object.prototype.hasOwnProperty.call(buckets, hash)) {
1653
1662
  if (j < tags.length - 1) {
@@ -1655,9 +1664,11 @@ export const getSearchTree = (
1655
1664
  }
1656
1665
  buckets[hash].push(member)
1657
1666
  keys[key].literals.push(literal)
1667
+ keys[key].candidates.push(member)
1658
1668
  } else {
1659
1669
  buckets[hash] = [member]
1660
1670
  keys[key].literals.push(literal)
1671
+ keys[key].candidates.push(member)
1661
1672
  break
1662
1673
  }
1663
1674
  }
@@ -1665,7 +1676,7 @@ export const getSearchTree = (
1665
1676
  otherwise.push(member)
1666
1677
  }
1667
1678
  }
1668
- return { keys, otherwise }
1679
+ return { keys, otherwise, candidates }
1669
1680
  }
1670
1681
 
1671
1682
  const dropRightRefinement = (ast: AST.AST): AST.AST => AST.isRefinement(ast) ? dropRightRefinement(ast.from) : ast
package/src/Schema.ts CHANGED
@@ -3016,9 +3016,7 @@ export const pluck: {
3016
3016
  * @category struct transformations
3017
3017
  * @since 3.10.0
3018
3018
  */
3019
- <A, I, K extends keyof A & keyof I>(
3020
- key: K
3021
- ): <R>(schema: Schema<A, I, R>) => Schema<A[K], { readonly [P in K]: I[P] }, R>
3019
+ <A, I, K extends keyof A & keyof I>(key: K): <R>(schema: Schema<A, I, R>) => Schema<A[K], Simplify<Pick<I, K>>, R>
3022
3020
  /**
3023
3021
  * Given a schema `Schema<A, I, R>` and a key `key: K`, this function extracts a specific field from the `A` type,
3024
3022
  * producing a new schema that represents a transformation from the `{ readonly [key]: I[K] }` type to `A[K]`.
@@ -3049,7 +3047,7 @@ export const pluck: {
3049
3047
  * @category struct transformations
3050
3048
  * @since 3.10.0
3051
3049
  */
3052
- <A, I, R, K extends keyof A & keyof I>(schema: Schema<A, I, R>, key: K): Schema<A[K], { readonly [P in K]: I[P] }, R>
3050
+ <A, I, R, K extends keyof A & keyof I>(schema: Schema<A, I, R>, key: K): Schema<A[K], Simplify<Pick<I, K>>, R>
3053
3051
  } = dual(
3054
3052
  2,
3055
3053
  <A, I, R, K extends keyof A & keyof I>(
@@ -3722,7 +3720,8 @@ export type RefineSchemaId = typeof RefineSchemaId
3722
3720
  export interface refine<A, From extends Schema.Any>
3723
3721
  extends AnnotableClass<refine<A, From>, A, Schema.Encoded<From>, Schema.Context<From>>
3724
3722
  {
3725
- readonly [RefineSchemaId]: From // required for `type HasFields = ...`
3723
+ /** The following is required for {@link HasFields} to work */
3724
+ readonly [RefineSchemaId]: From
3726
3725
  readonly from: From
3727
3726
  readonly filter: (
3728
3727
  a: Schema.Type<From>,
@@ -6030,7 +6029,7 @@ export const NonNegativeBigInt: filter<Schema<bigint, string>> = BigInt$.pipe(
6030
6029
  */
6031
6030
  export class BigIntFromNumber extends transformOrFail(
6032
6031
  Number$.annotations({ description: "a number to be decoded into a bigint" }),
6033
- BigIntFromSelf,
6032
+ BigIntFromSelf.pipe(betweenBigInt(BigInt(Number.MIN_SAFE_INTEGER), BigInt(Number.MAX_SAFE_INTEGER))),
6034
6033
  {
6035
6034
  strict: true,
6036
6035
  decode: (n, _, ast) =>
@@ -6147,29 +6146,23 @@ export class DurationFromSelf extends declare(
6147
6146
  arbitrary: (): LazyArbitrary<duration_.Duration> => (fc) =>
6148
6147
  fc.oneof(
6149
6148
  fc.constant(duration_.infinity),
6150
- fc.bigUint().map((_) => duration_.nanos(_)),
6151
- fc.bigUint().map((_) => duration_.micros(_)),
6152
- fc.maxSafeNat().map((_) => duration_.millis(_)),
6153
- fc.maxSafeNat().map((_) => duration_.seconds(_)),
6154
- fc.maxSafeNat().map((_) => duration_.minutes(_)),
6155
- fc.maxSafeNat().map((_) => duration_.hours(_)),
6156
- fc.maxSafeNat().map((_) => duration_.days(_)),
6157
- fc.maxSafeNat().map((_) => duration_.weeks(_))
6149
+ fc.bigInt({ min: 0n }).map((_) => duration_.nanos(_)),
6150
+ fc.maxSafeNat().map((_) => duration_.millis(_))
6158
6151
  ),
6159
6152
  equivalence: (): Equivalence.Equivalence<duration_.Duration> => duration_.Equivalence
6160
6153
  }
6161
6154
  ) {}
6162
6155
 
6163
6156
  /**
6164
- * A schema that transforms a `bigint` tuple into a `Duration`.
6165
- * Treats the value as the number of nanoseconds.
6157
+ * A schema that transforms a non negative `bigint` into a `Duration`. Treats
6158
+ * the value as the number of nanoseconds.
6166
6159
  *
6167
6160
  * @category Duration transformations
6168
6161
  * @since 3.10.0
6169
6162
  */
6170
6163
  export class DurationFromNanos extends transformOrFail(
6171
- BigIntFromSelf.annotations({ description: "a bigint to be decoded into a Duration" }),
6172
- DurationFromSelf,
6164
+ NonNegativeBigIntFromSelf.annotations({ description: "a bigint to be decoded into a Duration" }),
6165
+ DurationFromSelf.pipe(filter((duration) => duration_.isFinite(duration), { description: "a finite duration" })),
6173
6166
  {
6174
6167
  strict: true,
6175
6168
  decode: (nanos) => ParseResult.succeed(duration_.nanos(nanos)),
@@ -6177,48 +6170,66 @@ export class DurationFromNanos extends transformOrFail(
6177
6170
  option_.match(duration_.toNanos(duration), {
6178
6171
  onNone: () =>
6179
6172
  ParseResult.fail(new ParseResult.Type(ast, duration, `Unable to encode ${duration} into a bigint`)),
6180
- onSome: (val) => ParseResult.succeed(val)
6173
+ onSome: (nanos) => ParseResult.succeed(nanos)
6181
6174
  })
6182
6175
  }
6183
6176
  ).annotations({ identifier: "DurationFromNanos" }) {}
6184
6177
 
6185
6178
  /**
6186
- * A schema that transforms a `number` tuple into a `Duration`.
6187
- * Treats the value as the number of milliseconds.
6179
+ * A non-negative integer. +Infinity is excluded.
6180
+ *
6181
+ * @category number constructors
6182
+ * @since 3.11.10
6183
+ */
6184
+ export const NonNegativeInt = NonNegative.pipe(int()).annotations({ identifier: "NonNegativeInt" })
6185
+
6186
+ /**
6187
+ * A schema that transforms a (possibly Infinite) non negative number into a
6188
+ * `Duration`. Treats the value as the number of milliseconds.
6188
6189
  *
6189
6190
  * @category Duration transformations
6190
6191
  * @since 3.10.0
6191
6192
  */
6192
6193
  export class DurationFromMillis extends transform(
6193
- Number$.annotations({ description: "a number to be decoded into a Duration" }),
6194
+ NonNegative.annotations({
6195
+ description: "a non-negative number to be decoded into a Duration"
6196
+ }),
6194
6197
  DurationFromSelf,
6195
- { strict: true, decode: (ms) => duration_.millis(ms), encode: (n) => duration_.toMillis(n) }
6198
+ {
6199
+ strict: true,
6200
+ decode: (ms) => duration_.millis(ms),
6201
+ encode: (duration) => duration_.toMillis(duration)
6202
+ }
6196
6203
  ).annotations({ identifier: "DurationFromMillis" }) {}
6197
6204
 
6198
- /**
6199
- * @category number constructors
6200
- * @since 3.11.10
6201
- */
6202
- export const NonNegativeInt = NonNegative.pipe(int()).annotations({ identifier: "NonNegativeInt" })
6203
-
6204
- const HRTime: Schema<readonly [seconds: number, nanos: number]> = Tuple(
6205
+ const FiniteHRTime = Tuple(
6205
6206
  element(NonNegativeInt).annotations({ title: "seconds" }),
6206
6207
  element(NonNegativeInt).annotations({ title: "nanos" })
6207
- ).annotations({ identifier: "HRTime" })
6208
+ ).annotations({ identifier: "FiniteHRTime" })
6209
+
6210
+ const InfiniteHRTime = Tuple(Literal(-1), Literal(0)).annotations({ identifier: "InfiniteHRTime" })
6211
+
6212
+ const HRTime: Schema<readonly [seconds: number, nanos: number]> = Union(FiniteHRTime, InfiniteHRTime).annotations({
6213
+ identifier: "HRTime",
6214
+ description: "a tuple of seconds and nanos to be decoded into a Duration"
6215
+ })
6208
6216
 
6209
6217
  /**
6210
6218
  * A schema that transforms a `[number, number]` tuple into a `Duration`.
6211
6219
  *
6220
+ * Infinite durations are encoded as `[-1, 0]`.
6221
+ *
6212
6222
  * @category Duration transformations
6213
6223
  * @since 3.10.0
6214
6224
  */
6215
6225
  export class Duration extends transform(
6216
- HRTime.annotations({ description: "a tuple of seconds and nanos to be decoded into a Duration" }),
6226
+ HRTime,
6217
6227
  DurationFromSelf,
6218
6228
  {
6219
6229
  strict: true,
6220
- decode: ([seconds, nanos]) => duration_.nanos(BigInt(seconds) * BigInt(1e9) + BigInt(nanos)),
6221
- encode: (duration) => duration_.toHrTime(duration)
6230
+ decode: ([seconds, nanos]) =>
6231
+ seconds === -1 ? duration_.infinity : duration_.nanos(BigInt(seconds) * BigInt(1e9) + BigInt(nanos)),
6232
+ encode: (duration) => duration.value._tag === "Infinity" ? [-1, 0] as const : duration_.toHrTime(duration)
6222
6233
  }
6223
6234
  ).annotations({ identifier: "Duration" }) {}
6224
6235
 
package/src/internal/cause.ts CHANGED
@@ -132,7 +132,7 @@ export const sequential = <E, E2>(left: Cause.Cause<E>, right: Cause.Cause<E2>):
132
132
  // -----------------------------------------------------------------------------
133
133
 
134
134
  /** @internal */
135
- export const isCause = (u: unknown): u is Cause.Cause<never> => hasProperty(u, CauseTypeId)
135
+ export const isCause = (u: unknown): u is Cause.Cause<unknown> => hasProperty(u, CauseTypeId)
136
136
 
137
137
  /** @internal */
138
138
  export const isEmptyType = <E>(self: Cause.Cause<E>): self is Cause.Empty => self._tag === OpCodes.OP_EMPTY
package/src/internal/core-effect.ts CHANGED
@@ -103,7 +103,11 @@ export const try_: {
103
103
  try {
104
104
  return core.succeed(internalCall(evaluate))
105
105
  } catch (error) {
106
- return core.fail(onFailure ? internalCall(() => onFailure(error)) : new core.UnknownException(error))
106
+ return core.fail(
107
+ onFailure
108
+ ? internalCall(() => onFailure(error))
109
+ : new core.UnknownException(error, "An unknown error occurred in Effect.try")
110
+ )
107
111
  }
108
112
  })
109
113
  }
@@ -1644,17 +1648,20 @@ export const tryPromise: {
1644
1648
  evaluate = arg.try as (signal?: AbortSignal) => PromiseLike<A>
1645
1649
  catcher = arg.catch
1646
1650
  }
1651
+ const fail = (e: unknown) =>
1652
+ catcher
1653
+ ? core.failSync(() => catcher(e))
1654
+ : core.fail(new core.UnknownException(e, "An unknown error occurred in Effect.tryPromise"))
1647
1655
 
1648
1656
  if (evaluate.length >= 1) {
1649
1657
  return core.async((resolve, signal) => {
1650
1658
  try {
1651
- evaluate(signal)
1652
- .then(
1653
- (a) => resolve(core.exitSucceed(a)),
1654
- (e) => resolve(catcher ? core.failSync(() => catcher(e)) : core.fail(new core.UnknownException(e)))
1655
- )
1659
+ evaluate(signal).then(
1660
+ (a) => resolve(core.exitSucceed(a)),
1661
+ (e) => resolve(fail(e))
1662
+ )
1656
1663
  } catch (e) {
1657
- resolve(catcher ? core.failSync(() => catcher(e)) : core.fail(new core.UnknownException(e)))
1664
+ resolve(fail(e))
1658
1665
  }
1659
1666
  })
1660
1667
  }
@@ -1664,10 +1671,10 @@ export const tryPromise: {
1664
1671
  evaluate()
1665
1672
  .then(
1666
1673
  (a) => resolve(core.exitSucceed(a)),
1667
- (e) => resolve(catcher ? core.failSync(() => catcher(e)) : core.fail(new core.UnknownException(e)))
1674
+ (e) => resolve(fail(e))
1668
1675
  )
1669
1676
  } catch (e) {
1670
- resolve(catcher ? core.failSync(() => catcher(e)) : core.fail(new core.UnknownException(e)))
1677
+ resolve(fail(e))
1671
1678
  }
1672
1679
  })
1673
1680
  }
@@ -797,7 +797,8 @@ export const andThen: {
797
797
  return b
798
798
  } else if (isPromiseLike(b)) {
799
799
  return unsafeAsync<any, Cause.UnknownException>((resume) => {
800
- b.then((a) => resume(succeed(a)), (e) => resume(fail(new UnknownException(e))))
800
+ b.then((a) => resume(succeed(a)), (e) =>
801
+ resume(fail(new UnknownException(e, "An unknown error occurred in Effect.andThen"))))
801
802
  })
802
803
  }
803
804
  return succeed(b)
@@ -1281,7 +1282,8 @@ export const tap = dual<
1281
1282
  return as(b, a)
1282
1283
  } else if (isPromiseLike(b)) {
1283
1284
  return unsafeAsync<any, Cause.UnknownException>((resume) => {
1284
- b.then((_) => resume(succeed(a)), (e) => resume(fail(new UnknownException(e))))
1285
+ b.then((_) => resume(succeed(a)), (e) =>
1286
+ resume(fail(new UnknownException(e, "An unknown error occurred in Effect.tap"))))
1285
1287
  })
1286
1288
  }
1287
1289
  return succeed(a)
@@ -2208,7 +2210,10 @@ export const YieldableError: new(message?: string, options?: ErrorOptions) => Ca
2208
2210
  return fail(this)
2209
2211
  }
2210
2212
  toJSON() {
2211
- return { ...this }
2213
+ const obj = { ...this }
2214
+ if (this.message) obj.message = this.message
2215
+ if (this.cause) obj.cause = this.cause
2216
+ return obj
2212
2217
  }
2213
2218
  [NodeInspectSymbol]() {
2214
2219
  if (this.toString !== globalThis.Error.prototype.toString) {
@@ -2345,7 +2350,7 @@ export const UnknownException: new(cause: unknown, message?: string | undefined)
2345
2350
  class UnknownException extends YieldableError {
2346
2351
  readonly _tag = "UnknownException"
2347
2352
  readonly error: unknown
2348
- constructor(readonly cause: unknown, message?: string) {
2353
+ constructor(cause: unknown, message?: string) {
2349
2354
  super(message ?? "An unknown error occurred", { cause })
2350
2355
  this.error = cause
2351
2356
  }
@@ -152,18 +152,16 @@ export const groupBy = dual<
152
152
  ): GroupBy.GroupBy<K, V, E | E2, R | R2> =>
153
153
  make(
154
154
  stream.unwrapScoped(
155
- Effect.gen(function*($) {
156
- const decider = yield* $(
157
- Deferred.make<(key: K, value: V) => Effect.Effect<Predicate<number>>>()
158
- )
159
- const output = yield* $(Effect.acquireRelease(
155
+ Effect.gen(function*() {
156
+ const decider = yield* Deferred.make<(key: K, value: V) => Effect.Effect<Predicate<number>>>()
157
+ const output = yield* Effect.acquireRelease(
160
158
  Queue.bounded<Exit.Exit<readonly [K, Queue.Dequeue<Take.Take<V, E | E2>>], Option.Option<E | E2>>>(
161
159
  options?.bufferSize ?? 16
162
160
  ),
163
161
  (queue) => Queue.shutdown(queue)
164
- ))
165
- const ref = yield* $(Ref.make<Map<K, number>>(new Map()))
166
- const add = yield* $(
162
+ )
163
+ const ref = yield* Ref.make<Map<K, number>>(new Map())
164
+ const add = yield* pipe(
167
165
  stream.mapEffectSequential(self, f),
168
166
  stream.distributedWithDynamicCallback(
169
167
  options?.bufferSize ?? 16,
@@ -171,37 +169,35 @@ export const groupBy = dual<
171
169
  (exit) => Queue.offer(output, exit)
172
170
  )
173
171
  )
174
- yield* $(
175
- Deferred.succeed(decider, (key, _) =>
176
- pipe(
177
- Ref.get(ref),
178
- Effect.map((map) => Option.fromNullable(map.get(key))),
179
- Effect.flatMap(Option.match({
180
- onNone: () =>
181
- Effect.flatMap(add, ([index, queue]) =>
182
- Effect.zipRight(
183
- Ref.update(ref, (map) => map.set(key, index)),
184
- pipe(
185
- Queue.offer(
186
- output,
187
- Exit.succeed(
188
- [
189
- key,
190
- mapDequeue(queue, (exit) =>
191
- new take.TakeImpl(pipe(
192
- exit,
193
- Exit.map((tuple) => Chunk.of(tuple[1]))
194
- )))
195
- ] as const
196
- )
197
- ),
198
- Effect.as<Predicate<number>>((n: number) => n === index)
199
- )
200
- )),
201
- onSome: (index) => Effect.succeed<Predicate<number>>((n: number) => n === index)
202
- }))
203
- ))
204
- )
172
+ yield* Deferred.succeed(decider, (key, _) =>
173
+ pipe(
174
+ Ref.get(ref),
175
+ Effect.map((map) => Option.fromNullable(map.get(key))),
176
+ Effect.flatMap(Option.match({
177
+ onNone: () =>
178
+ Effect.flatMap(add, ([index, queue]) =>
179
+ Effect.zipRight(
180
+ Ref.update(ref, (map) => map.set(key, index)),
181
+ pipe(
182
+ Queue.offer(
183
+ output,
184
+ Exit.succeed(
185
+ [
186
+ key,
187
+ mapDequeue(queue, (exit) =>
188
+ new take.TakeImpl(pipe(
189
+ exit,
190
+ Exit.map((tuple) => Chunk.of(tuple[1]))
191
+ )))
192
+ ] as const
193
+ )
194
+ ),
195
+ Effect.as<Predicate<number>>((n: number) => n === index)
196
+ )
197
+ )),
198
+ onSome: (index) => Effect.succeed<Predicate<number>>((n: number) => n === index)
199
+ }))
200
+ ))
205
201
  return stream.flattenExitOption(stream.fromQueue(output, { shutdown: true }))
206
202
  })
207
203
  )
package/src/internal/rateLimiter.ts CHANGED
@@ -2,6 +2,7 @@ import type { DurationInput } from "../Duration.js"
2
2
  import * as Duration from "../Duration.js"
3
3
  import * as Effect from "../Effect.js"
4
4
  import * as FiberRef from "../FiberRef.js"
5
+ import { pipe } from "../Function.js"
5
6
  import { globalValue } from "../GlobalValue.js"
6
7
  import type * as RateLimiter from "../RateLimiter.js"
7
8
  import type * as Scope from "../Scope.js"
@@ -31,16 +32,16 @@ const tokenBucket = (limit: number, window: DurationInput): Effect.Effect<
31
32
  never,
32
33
  Scope.Scope
33
34
  > =>
34
- Effect.gen(function*(_) {
35
+ Effect.gen(function*() {
35
36
  const millisPerToken = Math.ceil(Duration.toMillis(window) / limit)
36
- const semaphore = yield* _(Effect.makeSemaphore(limit))
37
+ const semaphore = yield* Effect.makeSemaphore(limit)
37
38
  const latch = yield* Effect.makeSemaphore(0)
38
39
  const refill: Effect.Effect<void> = Effect.sleep(millisPerToken).pipe(
39
40
  Effect.zipRight(latch.releaseAll),
40
41
  Effect.zipRight(semaphore.release(1)),
41
42
  Effect.flatMap((free) => free === limit ? Effect.void : refill)
42
43
  )
43
- yield* _(
44
+ yield* pipe(
44
45
  latch.take(1),
45
46
  Effect.zipRight(refill),
46
47
  Effect.forever,
@@ -61,10 +62,10 @@ const fixedWindow = (limit: number, window: DurationInput): Effect.Effect<
61
62
  never,
62
63
  Scope.Scope
63
64
  > =>
64
- Effect.gen(function*(_) {
65
- const semaphore = yield* _(Effect.makeSemaphore(limit))
66
- const latch = yield* _(Effect.makeSemaphore(0))
67
- yield* _(
65
+ Effect.gen(function*() {
66
+ const semaphore = yield* Effect.makeSemaphore(limit)
67
+ const latch = yield* Effect.makeSemaphore(0)
68
+ yield* pipe(
68
69
  latch.take(1),
69
70
  Effect.zipRight(Effect.sleep(window)),
70
71
  Effect.zipRight(latch.releaseAll),