@livestore/adapter-web 0.3.0-dev.24 → 0.3.0-dev.25

@@ -1,7 +1,7 @@
-import { liveStoreStorageFormatVersion } from '@livestore/common'
+import { liveStoreStorageFormatVersion, UnexpectedError } from '@livestore/common'
 import type { LiveStoreSchema } from '@livestore/common/schema'
 import { decodeSAHPoolFilename, HEADER_OFFSET_DATA } from '@livestore/sqlite-wasm/browser'
-import { Effect, Schema } from '@livestore/utils/effect'
+import { Effect, Schedule, Schema } from '@livestore/utils/effect'
 
 import * as OpfsUtils from '../../opfs-utils.js'
 import type * as WorkerSchema from './worker-schema.js'
@@ -87,7 +87,12 @@ export const resetPersistedDataFromClientSession = ({
   Effect.gen(function* () {
     const directory = sanitizeOpfsDir(storageOptions.directory, storeId)
     yield* opfsDeleteAbs(directory)
-  }).pipe(Effect.withSpan('@livestore/adapter-web:resetPersistedDataFromClientSession'))
+  }).pipe(
+    Effect.retry({
+      schedule: Schedule.exponentialBackoff10Sec,
+    }),
+    Effect.withSpan('@livestore/adapter-web:resetPersistedDataFromClientSession'),
+  )
 
 const opfsDeleteAbs = (absPath: string) =>
   Effect.promise(async () => {
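
Note: the retry added above uses `Schedule.exponentialBackoff10Sec`, a schedule re-exported from `@livestore/utils/effect`; the name suggests exponential backoff bounded at ten seconds, though its exact shape is internal to LiveStore. A minimal sketch of an equivalent schedule in plain Effect, assuming a 10 ms base delay and a ten-second overall retry window:

import { Duration, Effect, Schedule } from 'effect'

// Assumed equivalent of `Schedule.exponentialBackoff10Sec`: exponentially
// growing delays, retried for at most ten seconds in total.
const exponentialBackoff10Sec = Schedule.exponential(Duration.millis(10)).pipe(
  Schedule.upTo(Duration.seconds(10)),
)

// Usage mirroring the hunk above: retry a flaky OPFS delete (e.g. while
// another tab still holds a file lock) until the schedule is exhausted.
const deleteWithRetry = (opfsDelete: Effect.Effect<void, Error>) =>
  opfsDelete.pipe(Effect.retry({ schedule: exponentialBackoff10Sec }))
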
@@ -114,7 +119,10 @@ const opfsDeleteAbs = (absPath: string) =>
         throw error
       }
     }
-  }).pipe(Effect.withSpan('@livestore/adapter-web:worker:opfsDeleteFile', { attributes: { absFilePath: absPath } }))
+  }).pipe(
+    UnexpectedError.mapToUnexpectedError,
+    Effect.withSpan('@livestore/adapter-web:worker:opfsDeleteFile', { attributes: { absFilePath: absPath } }),
+  )
 
 export const sanitizeOpfsDir = (directory: string | undefined, storeId: string) => {
   // Root dir should be `''` not `/`
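
Note: `UnexpectedError.mapToUnexpectedError`, newly applied above, funnels whatever the `Effect.promise` block throws into the tagged `UnexpectedError` type from `@livestore/common`. A hypothetical reconstruction of the combinator's shape, assuming a `Schema.TaggedError`-based class (the real definition lives in `@livestore/common`):

import { Effect, Schema } from 'effect'

// Sketch only; field layout is an assumption.
class UnexpectedError extends Schema.TaggedError<UnexpectedError>()('UnexpectedError', {
  cause: Schema.Defect,
}) {
  // Wrap any failure of the given effect into an `UnexpectedError`,
  // passing already-wrapped errors through unchanged.
  static mapToUnexpectedError = <A, E, R>(effect: Effect.Effect<A, E, R>) =>
    effect.pipe(
      Effect.mapError((cause) => (cause instanceof UnexpectedError ? cause : new UnexpectedError({ cause }))),
    )
}
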
@@ -8,7 +8,7 @@ import {
   UnexpectedError,
 } from '@livestore/common'
 import { EventId, MutationEvent } from '@livestore/common/schema'
-import * as WebMeshWorker from '@livestore/devtools-web-common/worker'
+import * as WebmeshWorker from '@livestore/devtools-web-common/worker'
 import { Schema, Transferable } from '@livestore/utils/effect'
 
 export const StorageTypeOpfs = Schema.Struct({
@@ -159,7 +159,7 @@ export namespace LeaderWorkerInner {
     NetworkStatusStream,
     Shutdown,
     ExtraDevtoolsMessage,
-    WebMeshWorker.Schema.CreateConnection,
+    WebmeshWorker.Schema.CreateConnection,
   )
   export type Request = typeof Request.Type
 }
@@ -202,6 +202,6 @@ export namespace SharedWorker {
     LeaderWorkerInner.Shutdown,
     LeaderWorkerInner.ExtraDevtoolsMessage,
 
-    WebMeshWorker.Schema.CreateConnection,
+    WebmeshWorker.Schema.CreateConnection,
   ) {}
 }
@@ -10,7 +10,7 @@ import {
 import type { LiveStoreSchema } from '@livestore/common/schema'
 import { MutationEvent } from '@livestore/common/schema'
 import { makeChannelForConnectedMeshNode } from '@livestore/devtools-web-common/web-channel'
-import * as WebMeshWorker from '@livestore/devtools-web-common/worker'
+import * as WebmeshWorker from '@livestore/devtools-web-common/worker'
 import { sqliteDbFactory } from '@livestore/sqlite-wasm/browser'
 import { loadSqlite3Wasm } from '@livestore/sqlite-wasm/load-wasm'
 import { isDevEnv, LS_DEV } from '@livestore/utils'
@@ -97,7 +97,7 @@ const makeWorkerRunnerOuter = (
     Effect.scoped,
     Effect.withSpan('@livestore/adapter-web:worker:wrapper:InitialMessage:innerFiber'),
     Effect.tapCauseLogPretty,
-    Effect.provide(WebMeshWorker.CacheService.layer({ nodeName: `leader-${storeId}-${clientId}` })),
+    Effect.provide(WebmeshWorker.CacheService.layer({ nodeName: `leader-${storeId}-${clientId}` })),
     Effect.forkScoped,
   )
 
@@ -242,7 +242,7 @@ const makeWorkerRunnerInner = ({ schema, sync: syncOptions }: WorkerOptions) =>
       UnexpectedError.mapToUnexpectedError,
       Effect.withSpan('@livestore/adapter-web:worker:ExtraDevtoolsMessage'),
     ),
-    'DevtoolsWebCommon.CreateConnection': WebMeshWorker.CreateConnection,
+    'DevtoolsWebCommon.CreateConnection': WebmeshWorker.CreateConnection,
   })
 
 const makeDevtoolsOptions = ({
@@ -253,12 +253,12 @@ const makeDevtoolsOptions = ({
   devtoolsEnabled: boolean
   dbReadModel: SqliteDb
   dbMutationLog: SqliteDb
-}): Effect.Effect<DevtoolsOptions, UnexpectedError, Scope.Scope | WebMeshWorker.CacheService> =>
+}): Effect.Effect<DevtoolsOptions, UnexpectedError, Scope.Scope | WebmeshWorker.CacheService> =>
   Effect.gen(function* () {
     if (devtoolsEnabled === false) {
       return { enabled: false }
     }
-    const { node } = yield* WebMeshWorker.CacheService
+    const { node } = yield* WebmeshWorker.CacheService
 
     return {
       enabled: true,
@@ -1,12 +1,11 @@
 import { UnexpectedError } from '@livestore/common'
 import { connectViaWorker } from '@livestore/devtools-web-common/web-channel'
-import * as WebMeshWorker from '@livestore/devtools-web-common/worker'
+import * as WebmeshWorker from '@livestore/devtools-web-common/worker'
 import { isDevEnv, isNotUndefined, LS_DEV } from '@livestore/utils'
 import {
   BrowserWorker,
   BrowserWorkerRunner,
   Deferred,
-  Duration,
   Effect,
   Exit,
   FetchHttpClient,
@@ -15,7 +14,6 @@ import {
   Logger,
   LogLevel,
   ParseResult,
-  Queue,
   Ref,
   Schema,
   Scope,
@@ -65,6 +63,7 @@ const makeWorkerRunner = Effect.gen(function* () {
       Effect.andThen((worker) => worker.executeEffect(req) as Effect.Effect<unknown, unknown, never>),
       // Effect.tap((_) => Effect.log(`forwardRequest: ${req._tag}`, _)),
       // Effect.tapError((cause) => Effect.logError(`forwardRequest err: ${req._tag}`, cause)),
+      Effect.interruptible,
       Effect.logWarnIfTakesLongerThan({
         label: `@livestore/adapter-web:shared-worker:forwardRequest:${req._tag}`,
         duration: 500,
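
Note: the `Effect.interruptible` added above presumably matters because forwarded requests can otherwise outlive the worker context they were issued against; marking the forwarding step interruptible lets a pending request be cancelled promptly when that context is torn down. A toy example of the combinator's semantics (not LiveStore code):

import { Effect, Fiber } from 'effect'

// `Effect.interruptible` re-opens an interruptible window inside an
// otherwise uninterruptible region.
const request = Effect.uninterruptible(Effect.sleep('30 seconds').pipe(Effect.interruptible))

const program = Effect.gen(function* () {
  const fiber = yield* Effect.fork(request)
  yield* Effect.sleep('10 millis')
  yield* Fiber.interrupt(fiber) // returns promptly thanks to the inner window
})
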
@@ -80,57 +79,35 @@ const makeWorkerRunner = Effect.gen(function* () {
       Effect.tapCauseLogPretty,
     ) as any
 
-  // const forwardRequestStream = <TReq extends WorkerSchema.DedicatedWorkerInner.Request>(
-  //   req: TReq,
-  // ): TReq extends Serializable.WithResult<infer A, infer _I, infer _E, infer _EI, infer _R>
-  //   ? Stream.Stream<A, UnexpectedError, never>
-  //   : never =>
-  //   waitForWorker.pipe(
-  //     Effect.logBefore(`forwardRequestStream: ${req._tag}`),
-  //     Effect.andThen((worker) => worker.execute(req) as Stream.Stream<unknown, unknown, never>),
-  //     Effect.interruptible,
-  //     UnexpectedError.mapToUnexpectedError,
-  //     Effect.tapCauseLogPretty,
-  //     Stream.unwrap,
-  //     Stream.ensuring(Effect.logDebug(`shutting down stream for ${req._tag}`)),
-  //     UnexpectedError.mapToUnexpectedErrorStream,
-  //   ) as any
-
-  // TODO bring back the `forwardRequestStream` impl above. Needs debugging with Tim Smart
-  // It seems the in-progress streams are not being closed properly if the worker is closed (e.g. by closing the leader tab)
   const forwardRequestStream = <TReq extends WorkerSchema.LeaderWorkerInner.Request>(
     req: TReq,
   ): TReq extends Schema.WithResult<infer A, infer _I, infer _E, infer _EI, infer _R>
     ? Stream.Stream<A, UnexpectedError, never>
     : never =>
     Effect.gen(function* () {
+      yield* Effect.logDebug(`forwardRequestStream: ${req._tag}`)
       const { worker, scope } = yield* SubscriptionRef.waitUntil(leaderWorkerContextSubRef, isNotUndefined)
-      const queue = yield* Queue.unbounded()
+      const stream = worker.execute(req) as Stream.Stream<unknown, unknown, never>
 
-      yield* Scope.addFinalizer(scope, Queue.shutdown(queue))
+      // It seems the request stream is not automatically interrupted when the scope shuts down
+      // so we need to manually interrupt it when the scope shuts down
+      const shutdownDeferred = yield* Deferred.make<void>()
+      yield* Scope.addFinalizer(scope, Deferred.succeed(shutdownDeferred, undefined))
 
-      const workerStream = worker.execute(req) as Stream.Stream<unknown, unknown, never>
+      // Here we're creating an empty stream that will finish when the scope shuts down
+      const scopeShutdownStream = Effect.gen(function* () {
+        yield* shutdownDeferred
+        return Stream.empty
+      }).pipe(Stream.unwrap)
 
-      yield* workerStream.pipe(
-        Stream.tap((_) => Queue.offer(queue, _)),
-        Stream.runDrain,
-        Effect.interruptible,
-        Effect.forkIn(scope),
-      )
-
-      return Stream.fromQueue(queue)
+      return Stream.merge(stream, scopeShutdownStream, { haltStrategy: 'either' })
     }).pipe(
+      Effect.interruptible,
      UnexpectedError.mapToUnexpectedError,
      Effect.tapCauseLogPretty,
      Stream.unwrap,
-      Stream.mapError((cause) =>
-        Schema.is(UnexpectedError)(cause)
-          ? cause
-          : ParseResult.isParseError(cause) || Schema.is(WorkerError.WorkerError)(cause)
-            ? new UnexpectedError({ cause })
-            : cause,
-      ),
-      // Stream.ensuring(Effect.logDebug(`shutting down stream for ${req._tag}`)),
+      Stream.ensuring(Effect.logDebug(`shutting down stream for ${req._tag}`)),
+      UnexpectedError.mapToUnexpectedErrorStream,
    ) as any
 
  const resetCurrentWorkerCtx = Effect.gen(function* () {
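
Note: the rewritten `forwardRequestStream` above replaces the Queue-based forwarding (an unbounded queue plus a forked drain fiber) with a direct merge: the worker stream is raced against an empty stream that completes only when a finalizer on the owning scope fires, so closing the scope now reliably ends the stream. The pattern in isolation, as a generic sketch with nothing LiveStore-specific:

import { Deferred, Effect, Scope, Stream } from 'effect'

// End `stream` when `scope` closes: merge it with an empty stream that
// completes once a finalizer registered on the scope has run.
const endWhenScopeCloses = <A, E>(stream: Stream.Stream<A, E>, scope: Scope.Scope) =>
  Effect.gen(function* () {
    const shutdown = yield* Deferred.make<void>()
    yield* Scope.addFinalizer(scope, Deferred.succeed(shutdown, undefined))

    // Empty stream that finishes exactly when the scope shuts down.
    const scopeShutdownStream = Deferred.await(shutdown).pipe(Effect.as(Stream.empty), Stream.unwrap)

    // 'either' halts the merged stream as soon as one side completes.
    return Stream.merge(stream, scopeShutdownStream, { haltStrategy: 'either' })
  }).pipe(Stream.unwrap)

Compared with the removed queue approach, there is no intermediate buffer and no separate drain fiber whose shutdown ordering can leak in-progress streams, which is what the removed TODO comment was complaining about.
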
@@ -142,14 +119,10 @@ const makeWorkerRunner = Effect.gen(function* () {
       yield* Effect.yieldNow()
 
       yield* Scope.close(prevWorker.scope, Exit.void).pipe(
-        // TODO there still seem to be scenarios where it takes longer than 1 second which is leading to problems
-        Effect.timeout(Duration.seconds(1)),
         Effect.logWarnIfTakesLongerThan({
           label: '@livestore/adapter-web:shared-worker:close-previous-worker',
           duration: 500,
         }),
-        // Effect.catchTag('TimeoutException', () => Scope.close(prevWorker.scope, Exit.fail('boom'))),
-        Effect.ignoreLogged,
       )
     }
   }).pipe(Effect.withSpan('@livestore/adapter-web:shared-worker:resetCurrentWorkerCtx'))
@@ -208,26 +181,6 @@ const makeWorkerRunner = Effect.gen(function* () {
 
       const scope = yield* Scope.make()
 
-      const workerDeferred = yield* Deferred.make<
-        Worker.SerializedWorkerPool<WorkerSchema.LeaderWorkerInner.Request>,
-        UnexpectedError
-      >()
-      // TODO we could also keep the pool instance around to re-use it by removing the previous worker and adding a new one
-      yield* Worker.makePoolSerialized<WorkerSchema.LeaderWorkerInner.Request>({
-        size: 1,
-        concurrency: 100,
-        initialMessage: () => initialMessagePayload.initialMessage,
-      }).pipe(
-        Effect.tap((worker) => Deferred.succeed(workerDeferred, worker)),
-        Effect.provide(BrowserWorker.layer(() => port)),
-        Effect.catchAllCause((cause) => new UnexpectedError({ cause })),
-        Effect.tapError((cause) => Deferred.fail(workerDeferred, cause)),
-        Effect.withSpan('@livestore/adapter-web:shared-worker:makeWorkerProxyFromPort'),
-        Effect.tapCauseLogPretty,
-        Scope.extend(scope),
-        Effect.forkIn(scope),
-      )
-
       yield* Effect.gen(function* () {
         const shutdownChannel = yield* makeShutdownChannel(initialMessagePayload.initialMessage.storeId)
 
@@ -235,22 +188,32 @@ const makeWorkerRunner = Effect.gen(function* () {
           Stream.flatten(),
           Stream.tap(() => reset),
           Stream.runDrain,
+          Effect.tapCauseLogPretty,
+          Effect.forkScoped,
         )
-      }).pipe(Effect.tapCauseLogPretty, Scope.extend(scope), Effect.forkIn(scope))
 
-      const worker = yield* workerDeferred
+        const workerLayer = yield* Layer.build(BrowserWorker.layer(() => port))
 
-      // Prepare the web mesh connection for leader worker to be able to connect to the devtools
-      const { node } = yield* WebMeshWorker.CacheService
-      const { storeId, clientId } = initialMessagePayload.initialMessage
+        const worker = yield* Worker.makePoolSerialized<WorkerSchema.LeaderWorkerInner.Request>({
+          size: 1,
+          concurrency: 100,
+          initialMessage: () => initialMessagePayload.initialMessage,
+        }).pipe(
+          Effect.provide(workerLayer),
+          Effect.withSpan('@livestore/adapter-web:shared-worker:makeWorkerProxyFromPort'),
+        )
 
-      yield* connectViaWorker({ node, worker, target: `leader-${storeId}-${clientId}` }).pipe(
-        Effect.tapCauseLogPretty,
-        Scope.extend(scope),
-        Effect.forkIn(scope),
-      )
+        // Prepare the web mesh connection for leader worker to be able to connect to the devtools
+        const { node } = yield* WebmeshWorker.CacheService
+        const { storeId, clientId } = initialMessagePayload.initialMessage
 
-      yield* SubscriptionRef.set(leaderWorkerContextSubRef, { worker, scope })
+        yield* connectViaWorker({ node, worker, target: `leader-${storeId}-${clientId}` }).pipe(
+          Effect.tapCauseLogPretty,
+          Effect.forkScoped,
+        )
+
+        yield* SubscriptionRef.set(leaderWorkerContextSubRef, { worker, scope })
+      }).pipe(Effect.tapCauseLogPretty, Scope.extend(scope), Effect.forkIn(scope))
     }).pipe(
       Effect.withSpan('@livestore/adapter-web:shared-worker:updateMessagePort'),
      UnexpectedError.mapToUnexpectedError,
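
Note: the pool construction above is now inline. `Layer.build` turns the `BrowserWorker` layer into a `Context` whose lifetime is tied to the enclosing scope, and that context is handed to the pool via `Effect.provide(workerLayer)`; this removes the earlier detour through a forked fiber and a `Deferred` that relayed the pool (or its failure). How `Layer.build` behaves in general, with a toy service standing in for the worker runtime (names hypothetical):

import { Context, Effect, Layer } from 'effect'

// Toy service; in the diff this role is played by the BrowserWorker layer.
class Port extends Context.Tag('Port')<Port, { readonly send: (msg: string) => Effect.Effect<void> }>() {}

const PortLive = Layer.succeed(Port, { send: (msg) => Effect.log(`send: ${msg}`) })

const program = Effect.scoped(
  Effect.gen(function* () {
    // Build once; the layer's resources live until the surrounding scope closes.
    const context = yield* Layer.build(PortLive)

    // Provide the pre-built context to any effect that needs the service.
    const port = yield* Port.pipe(Effect.provide(context))
    yield* port.send('hello')
  }),
)
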
@@ -272,9 +235,7 @@ const makeWorkerRunner = Effect.gen(function* () {
         ExtraDevtoolsMessage: forwardRequest,
 
         // Accept devtools connections (from leader and client sessions)
-        'DevtoolsWebCommon.CreateConnection': WebMeshWorker.CreateConnection,
-
-        // ...devtoolsWebBridge.handlers,
+        'DevtoolsWebCommon.CreateConnection': WebmeshWorker.CreateConnection,
       })
   }).pipe(Layer.unwrapScoped)
 
@@ -288,7 +249,7 @@ export const makeWorker = () => {
     Effect.annotateLogs({ thread: self.name }),
     Effect.provide(Logger.prettyWithThread(self.name)),
     Effect.provide(FetchHttpClient.layer),
-    Effect.provide(WebMeshWorker.CacheService.layer({ nodeName: 'shared-worker' })),
+    Effect.provide(WebmeshWorker.CacheService.layer({ nodeName: 'shared-worker' })),
     LS_DEV ? TaskTracing.withAsyncTaggingTracing((name) => (console as any).createTask(name)) : identity,
     // TODO remove type-cast (currently needed to silence a tsc bug)
     (_) => _ as any as Effect.Effect<void, any>,