@modelnex/sdk 0.5.33 → 0.5.34

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/dist/index.js +138 -81
  2. package/dist/index.mjs +138 -81
  3. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -2879,7 +2879,8 @@ function sanitizePersistedRecordingSession(raw) {
2879
2879
  captureEvents: raw.captureEvents,
2880
2880
  capturedTranscript: typeof raw.capturedTranscript === "string" ? raw.capturedTranscript : "",
2881
2881
  pendingClicks: raw.pendingClicks,
2882
- selectedStepType: raw.selectedStepType
2882
+ selectedStepType: raw.selectedStepType,
2883
+ voiceCaptureEnabled: raw.voiceCaptureEnabled === true
2883
2884
  };
2884
2885
  }
2885
2886
  function readPersistedRecordingSession() {
@@ -6871,6 +6872,10 @@ function useRecordingMode({
6871
6872
  const stepsRef = (0, import_react16.useRef)([]);
6872
6873
  stepsRef.current = steps;
6873
6874
  const captureEventsRef = (0, import_react16.useRef)([]);
6875
+ const capturedTranscriptRef = (0, import_react16.useRef)(capturedTranscript);
6876
+ capturedTranscriptRef.current = capturedTranscript;
6877
+ const phaseRef = (0, import_react16.useRef)(phase);
6878
+ phaseRef.current = phase;
6874
6879
  const safeSpeak = (0, import_react16.useCallback)((text) => {
6875
6880
  void voice.speak(text).catch((err) => {
6876
6881
  console.warn("[Recording] Voice playback unavailable:", err);
@@ -6878,7 +6883,7 @@ function useRecordingMode({
6878
6883
  }, [voice]);
6879
6884
  captureEventsRef.current = captureEvents;
6880
6885
  const pendingClicksRef = (0, import_react16.useRef)(restoredSession?.pendingClicks ?? []);
6881
- const shouldKeepVoiceCaptureRef = (0, import_react16.useRef)(false);
6886
+ const shouldKeepVoiceCaptureRef = (0, import_react16.useRef)(restoredSession?.voiceCaptureEnabled === true);
6882
6887
  const resumeVoiceAfterNarrationRef = (0, import_react16.useRef)(false);
6883
6888
  const lastAutoNoteRef = (0, import_react16.useRef)("");
6884
6889
  const lastHoverKeyRef = (0, import_react16.useRef)("");
@@ -6888,19 +6893,43 @@ function useRecordingMode({
6888
6893
  const isRecording = phase !== "idle";
6889
6894
  const stepCount = steps.length;
6890
6895
  const captureEventCount = captureEvents.length;
6891
- const appendCaptureEvent = (0, import_react16.useCallback)((event) => {
6892
- setCaptureEvents((prev) => {
6893
- const nextEvent = {
6894
- id: event.id || newCaptureId(event.type),
6895
- order: prev.length,
6896
- timestamp: event.timestamp || (/* @__PURE__ */ new Date()).toISOString(),
6897
- ...event
6898
- };
6899
- return [...prev, nextEvent];
6896
+ const persistSnapshot = (0, import_react16.useCallback)((overrides) => {
6897
+ const nextPhase = overrides?.phase ?? phaseRef.current;
6898
+ const nextSteps = overrides?.steps ?? stepsRef.current;
6899
+ const nextCaptureEvents = overrides?.captureEvents ?? captureEventsRef.current;
6900
+ const nextTranscript = overrides?.capturedTranscript ?? capturedTranscriptRef.current;
6901
+ const nextSelectedStepType = overrides?.selectedStepType ?? selectedStepTypeRef.current;
6902
+ const nextVoiceCaptureEnabled = overrides?.voiceCaptureEnabled ?? shouldKeepVoiceCaptureRef.current;
6903
+ if (nextPhase === "idle" && nextSteps.length === 0 && nextCaptureEvents.length === 0 && !nextTranscript.trim()) {
6904
+ clearPersistedRecordingSession();
6905
+ return;
6906
+ }
6907
+ persistRecordingSession({
6908
+ version: 1,
6909
+ experienceType,
6910
+ phase: nextPhase,
6911
+ steps: nextSteps,
6912
+ captureEvents: nextCaptureEvents,
6913
+ capturedTranscript: nextTranscript,
6914
+ pendingClicks: pendingClicksRef.current,
6915
+ selectedStepType: nextSelectedStepType,
6916
+ voiceCaptureEnabled: nextVoiceCaptureEnabled
6900
6917
  });
6901
- }, []);
6918
+ }, [experienceType]);
6919
+ const appendCaptureEvent = (0, import_react16.useCallback)((event) => {
6920
+ const nextEvent = {
6921
+ id: event.id || newCaptureId(event.type),
6922
+ order: captureEventsRef.current.length,
6923
+ timestamp: event.timestamp || (/* @__PURE__ */ new Date()).toISOString(),
6924
+ ...event
6925
+ };
6926
+ const nextCaptureEvents = [...captureEventsRef.current, nextEvent];
6927
+ captureEventsRef.current = nextCaptureEvents;
6928
+ setCaptureEvents(nextCaptureEvents);
6929
+ persistSnapshot({ captureEvents: nextCaptureEvents });
6930
+ }, [persistSnapshot]);
6902
6931
  const updateCaptureEvent = (0, import_react16.useCallback)((id, metadataPatch) => {
6903
- setCaptureEvents((prev) => prev.map((event) => {
6932
+ const nextCaptureEvents = captureEventsRef.current.map((event) => {
6904
6933
  if (event.id === id) {
6905
6934
  return {
6906
6935
  ...event,
@@ -6911,14 +6940,19 @@ function useRecordingMode({
6911
6940
  };
6912
6941
  }
6913
6942
  return event;
6914
- }));
6915
- }, []);
6943
+ });
6944
+ captureEventsRef.current = nextCaptureEvents;
6945
+ setCaptureEvents(nextCaptureEvents);
6946
+ persistSnapshot({ captureEvents: nextCaptureEvents });
6947
+ }, [persistSnapshot]);
6916
6948
  const appendVoiceNote = (0, import_react16.useCallback)((transcript) => {
6917
6949
  const text = transcript.trim();
6918
6950
  if (!text) return;
6919
6951
  if (lastAutoNoteRef.current === text) return;
6920
6952
  lastAutoNoteRef.current = text;
6921
- setCapturedTranscript((prev) => prev ? `${prev} ${text}` : text);
6953
+ const nextTranscript = capturedTranscriptRef.current ? `${capturedTranscriptRef.current} ${text}` : text;
6954
+ capturedTranscriptRef.current = nextTranscript;
6955
+ setCapturedTranscript(nextTranscript);
6922
6956
  const recordedClicks = [...pendingClicksRef.current];
6923
6957
  const lastClick = recordedClicks[recordedClicks.length - 1];
6924
6958
  const newStep = {
@@ -6952,7 +6986,10 @@ function useRecordingMode({
6952
6986
  note: "voice_capture"
6953
6987
  }
6954
6988
  };
6955
- setSteps((prev) => [...prev, newStep]);
6989
+ const nextSteps = [...stepsRef.current, newStep];
6990
+ stepsRef.current = nextSteps;
6991
+ setSteps(nextSteps);
6992
+ persistSnapshot({ steps: nextSteps, capturedTranscript: nextTranscript });
6956
6993
  appendCaptureEvent({
6957
6994
  type: "note",
6958
6995
  url: getCurrentTourUrl(),
@@ -6968,7 +7005,7 @@ function useRecordingMode({
6968
7005
  stepOrder: newStep.order
6969
7006
  }
6970
7007
  });
6971
- }, [appendCaptureEvent]);
7008
+ }, [appendCaptureEvent, persistSnapshot]);
6972
7009
  const stopBackgroundVoiceCapture = (0, import_react16.useCallback)(() => {
6973
7010
  voice.stopListening();
6974
7011
  setIsVoiceCaptureActive(false);
@@ -6991,24 +7028,37 @@ function useRecordingMode({
6991
7028
  setIsVoiceCaptureActive(true);
6992
7029
  }, [appendVoiceNote, isRecording, phase, voice]);
6993
7030
  const startRecording = (0, import_react16.useCallback)(() => {
6994
- setPhase("active");
6995
- setSteps([]);
6996
- pendingClicksRef.current = [];
6997
- setCaptureEvents([{
7031
+ const nextPhase = "active";
7032
+ const nextCaptureEvents = [{
6998
7033
  id: newCaptureId("session_start"),
6999
7034
  order: 0,
7000
7035
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
7001
7036
  type: "session_start",
7002
7037
  url: getCurrentTourUrl(),
7003
7038
  metadata: { mode: "capture_v1" }
7004
- }]);
7039
+ }];
7040
+ phaseRef.current = nextPhase;
7041
+ setPhase(nextPhase);
7042
+ stepsRef.current = [];
7043
+ setSteps([]);
7044
+ pendingClicksRef.current = [];
7045
+ captureEventsRef.current = nextCaptureEvents;
7046
+ setCaptureEvents(nextCaptureEvents);
7047
+ capturedTranscriptRef.current = "";
7005
7048
  setCapturedTranscript("");
7006
7049
  setIsVoiceCaptureActive(false);
7007
7050
  shouldKeepVoiceCaptureRef.current = false;
7008
7051
  lastAutoNoteRef.current = "";
7009
7052
  lastHoverKeyRef.current = "";
7010
7053
  lastHoverAtRef.current = 0;
7011
- }, []);
7054
+ persistSnapshot({
7055
+ phase: nextPhase,
7056
+ steps: [],
7057
+ captureEvents: nextCaptureEvents,
7058
+ capturedTranscript: "",
7059
+ voiceCaptureEnabled: false
7060
+ });
7061
+ }, [persistSnapshot]);
7012
7062
  const markStep = (0, import_react16.useCallback)(() => {
7013
7063
  if (phase !== "active") return;
7014
7064
  setPhase("selecting");
@@ -7138,7 +7188,9 @@ function useRecordingMode({
7138
7188
  note: raw
7139
7189
  }
7140
7190
  };
7141
- setSteps((prev) => [...prev, newStep]);
7191
+ const nextSteps = [...stepsRef.current, newStep];
7192
+ stepsRef.current = nextSteps;
7193
+ setSteps(nextSteps);
7142
7194
  appendCaptureEvent({
7143
7195
  type: "note",
7144
7196
  url: getCurrentTourUrl(),
@@ -7157,14 +7209,16 @@ function useRecordingMode({
7157
7209
  setSelectedElement(null);
7158
7210
  setPendingNarration("");
7159
7211
  setPolishedNarration("");
7212
+ phaseRef.current = "active";
7160
7213
  setPhase("active");
7214
+ persistSnapshot({ steps: nextSteps, phase: "active" });
7161
7215
  safeSpeak(`Note ${newStep.order + 1} saved. Keep performing the workflow or add another note.`);
7162
7216
  if (resumeVoiceAfterNarrationRef.current || shouldKeepVoiceCaptureRef.current) {
7163
7217
  window.setTimeout(() => {
7164
7218
  if (shouldKeepVoiceCaptureRef.current) startBackgroundVoiceCapture();
7165
7219
  }, 0);
7166
7220
  }
7167
- }, [appendCaptureEvent, phase, polishedNarration, pendingNarration, selectedElement, selectedStepType, startBackgroundVoiceCapture, voice]);
7221
+ }, [appendCaptureEvent, phase, polishedNarration, pendingNarration, persistSnapshot, selectedElement, selectedStepType, startBackgroundVoiceCapture, voice]);
7168
7222
  const redoNarration = (0, import_react16.useCallback)(() => {
7169
7223
  if (phase !== "reviewing") return;
7170
7224
  setPendingNarration("");
@@ -7176,23 +7230,27 @@ function useRecordingMode({
7176
7230
  setPolishedNarration(text);
7177
7231
  }, []);
7178
7232
  const continueRecording = (0, import_react16.useCallback)(() => {
7233
+ phaseRef.current = "active";
7179
7234
  setPhase("active");
7235
+ persistSnapshot({ phase: "active" });
7180
7236
  if (shouldKeepVoiceCaptureRef.current) {
7181
7237
  window.setTimeout(() => {
7182
7238
  if (shouldKeepVoiceCaptureRef.current) startBackgroundVoiceCapture();
7183
7239
  }, 0);
7184
7240
  }
7185
- }, [startBackgroundVoiceCapture]);
7241
+ }, [persistSnapshot, startBackgroundVoiceCapture]);
7186
7242
  const undoLastStep = (0, import_react16.useCallback)(() => {
7187
- setSteps((prev) => {
7188
- const next = prev.slice(0, -1);
7189
- safeSpeak(
7190
- next.length > 0 ? `Step ${prev.length} removed. ${next.length} step${next.length !== 1 ? "s" : ""} remaining.` : "Step removed. No steps recorded yet."
7191
- );
7192
- return next;
7193
- });
7243
+ const previous = stepsRef.current;
7244
+ const nextSteps = previous.slice(0, -1);
7245
+ stepsRef.current = nextSteps;
7246
+ setSteps(nextSteps);
7247
+ safeSpeak(
7248
+ nextSteps.length > 0 ? `Step ${previous.length} removed. ${nextSteps.length} step${nextSteps.length !== 1 ? "s" : ""} remaining.` : "Step removed. No steps recorded yet."
7249
+ );
7250
+ phaseRef.current = "active";
7194
7251
  setPhase("active");
7195
- }, [voice]);
7252
+ persistSnapshot({ steps: nextSteps, phase: "active" });
7253
+ }, [persistSnapshot, voice]);
7196
7254
  const previewSteps = (0, import_react16.useCallback)(() => {
7197
7255
  if (steps.length === 0) {
7198
7256
  safeSpeak("No steps recorded yet.");
@@ -7205,18 +7263,24 @@ function useRecordingMode({
7205
7263
  shouldKeepVoiceCaptureRef.current = false;
7206
7264
  resumeVoiceAfterNarrationRef.current = false;
7207
7265
  stopBackgroundVoiceCapture();
7208
- }, [stopBackgroundVoiceCapture]);
7266
+ persistSnapshot({ voiceCaptureEnabled: false });
7267
+ }, [persistSnapshot, stopBackgroundVoiceCapture]);
7209
7268
  const cancelRecording = (0, import_react16.useCallback)(() => {
7210
7269
  shouldKeepVoiceCaptureRef.current = false;
7211
7270
  stopBackgroundVoiceCapture();
7271
+ phaseRef.current = "idle";
7212
7272
  setPhase("idle");
7273
+ stepsRef.current = [];
7213
7274
  setSteps([]);
7275
+ captureEventsRef.current = [];
7214
7276
  setCaptureEvents([]);
7277
+ capturedTranscriptRef.current = "";
7215
7278
  setCapturedTranscript("");
7216
7279
  pendingClicksRef.current = [];
7217
7280
  setSelectedElement(null);
7218
7281
  setPendingNarration("");
7219
7282
  setPolishedNarration("");
7283
+ clearPersistedRecordingSession();
7220
7284
  safeSpeak("Recording cancelled.");
7221
7285
  }, [safeSpeak, stopBackgroundVoiceCapture]);
7222
7286
  (0, import_react16.useEffect)(() => {
@@ -7260,22 +7324,20 @@ function useRecordingMode({
7260
7324
  }
7261
7325
  const eventId = newCaptureId("click");
7262
7326
  const timestamp = (/* @__PURE__ */ new Date()).toISOString();
7327
+ appendCaptureEvent({
7328
+ id: eventId,
7329
+ timestamp,
7330
+ type: "click",
7331
+ url: click.url,
7332
+ label: click.label,
7333
+ tagName: click.tagName,
7334
+ testId: click.testId,
7335
+ fingerprint: click.fingerprint,
7336
+ textContaining: click.textContaining,
7337
+ metadata: buildCaptureEventMetadata(interactiveTarget, "click")
7338
+ });
7263
7339
  void captureScreenshot().then((beforeScreenshot) => {
7264
- appendCaptureEvent({
7265
- id: eventId,
7266
- timestamp,
7267
- type: "click",
7268
- url: click.url,
7269
- label: click.label,
7270
- tagName: click.tagName,
7271
- testId: click.testId,
7272
- fingerprint: click.fingerprint,
7273
- textContaining: click.textContaining,
7274
- metadata: {
7275
- ...buildCaptureEventMetadata(interactiveTarget, "click"),
7276
- beforeScreenshot
7277
- }
7278
- });
7340
+ updateCaptureEvent(eventId, { beforeScreenshot });
7279
7341
  window.setTimeout(async () => {
7280
7342
  try {
7281
7343
  const afterScreenshot = await captureScreenshot();
@@ -7286,18 +7348,6 @@ function useRecordingMode({
7286
7348
  }, 1200);
7287
7349
  }).catch((err) => {
7288
7350
  console.warn("[Recording] Failed to capture before-click screenshot:", err);
7289
- appendCaptureEvent({
7290
- id: eventId,
7291
- timestamp,
7292
- type: "click",
7293
- url: click.url,
7294
- label: click.label,
7295
- tagName: click.tagName,
7296
- testId: click.testId,
7297
- fingerprint: click.fingerprint,
7298
- textContaining: click.textContaining,
7299
- metadata: buildCaptureEventMetadata(interactiveTarget, "click")
7300
- });
7301
7351
  });
7302
7352
  };
7303
7353
  const handleInputEvent = (event) => {
@@ -7343,10 +7393,24 @@ function useRecordingMode({
7343
7393
  window.clearInterval(routePoll);
7344
7394
  };
7345
7395
  }, [appendCaptureEvent, isRecording]);
7396
+ (0, import_react16.useEffect)(() => {
7397
+ if (!isRecording || typeof window === "undefined") return;
7398
+ const flushRecordingSession = () => {
7399
+ persistSnapshot();
7400
+ };
7401
+ window.addEventListener("pagehide", flushRecordingSession);
7402
+ window.addEventListener("beforeunload", flushRecordingSession);
7403
+ return () => {
7404
+ window.removeEventListener("pagehide", flushRecordingSession);
7405
+ window.removeEventListener("beforeunload", flushRecordingSession);
7406
+ };
7407
+ }, [isRecording, persistSnapshot]);
7346
7408
  const stopRecording = (0, import_react16.useCallback)(async (tourName, targetUserTypes) => {
7409
+ phaseRef.current = "finishing";
7347
7410
  setPhase("finishing");
7348
7411
  shouldKeepVoiceCaptureRef.current = false;
7349
7412
  stopBackgroundVoiceCapture();
7413
+ persistSnapshot({ phase: "finishing", voiceCaptureEnabled: false });
7350
7414
  if (steps.length === 0 && captureEventsRef.current.length <= 1) {
7351
7415
  safeSpeak("No workflow captured yet.");
7352
7416
  setPhase("idle");
@@ -7392,11 +7456,16 @@ function useRecordingMode({
7392
7456
  throw new Error(errorMessage);
7393
7457
  }
7394
7458
  const data = await res.json();
7459
+ stepsRef.current = [];
7395
7460
  setSteps([]);
7461
+ captureEventsRef.current = [];
7396
7462
  setCaptureEvents([]);
7463
+ capturedTranscriptRef.current = "";
7397
7464
  setCapturedTranscript("");
7398
7465
  pendingClicksRef.current = [];
7466
+ phaseRef.current = "idle";
7399
7467
  setPhase("idle");
7468
+ clearPersistedRecordingSession();
7400
7469
  const savedStepCount = Array.isArray(data?.tour?.steps) ? data.tour.steps.length : 0;
7401
7470
  const itemLabel = experienceType === "onboarding" ? "workflow" : "tour";
7402
7471
  safeSpeak(`${itemLabel === "tour" ? "Tour" : "Workflow"} "${tourName}" saved as a draft with ${savedStepCount} generated steps. Review and preview it before publishing.`);
@@ -7404,19 +7473,23 @@ function useRecordingMode({
7404
7473
  } catch (err) {
7405
7474
  console.error("[Recording] Failed to save tour:", err);
7406
7475
  safeSpeak(`Failed to save the ${experienceType === "onboarding" ? "workflow" : "tour"}. Please try again.`);
7476
+ phaseRef.current = "active";
7407
7477
  setPhase("active");
7478
+ persistSnapshot({ phase: "active" });
7408
7479
  return null;
7409
7480
  }
7410
- }, [capturedTranscript, experienceType, safeSpeak, serverUrl, stopBackgroundVoiceCapture, steps, toursApiBase, websiteId]);
7481
+ }, [capturedTranscript, experienceType, persistSnapshot, safeSpeak, serverUrl, stopBackgroundVoiceCapture, steps, toursApiBase, websiteId]);
7411
7482
  const toggleVoiceCapture = (0, import_react16.useCallback)(() => {
7412
7483
  if (isVoiceCaptureActive || shouldKeepVoiceCaptureRef.current) {
7413
7484
  shouldKeepVoiceCaptureRef.current = false;
7414
7485
  stopBackgroundVoiceCapture();
7486
+ persistSnapshot({ voiceCaptureEnabled: false });
7415
7487
  return;
7416
7488
  }
7417
7489
  shouldKeepVoiceCaptureRef.current = true;
7490
+ persistSnapshot({ voiceCaptureEnabled: true });
7418
7491
  startBackgroundVoiceCapture();
7419
- }, [isVoiceCaptureActive, startBackgroundVoiceCapture, stopBackgroundVoiceCapture]);
7492
+ }, [isVoiceCaptureActive, persistSnapshot, startBackgroundVoiceCapture, stopBackgroundVoiceCapture]);
7420
7493
  (0, import_react16.useEffect)(() => {
7421
7494
  if (!isRecording) return;
7422
7495
  if (phase === "active" && shouldKeepVoiceCaptureRef.current && !isVoiceCaptureActive) {
@@ -7452,22 +7525,6 @@ function useRecordingMode({
7452
7525
  }
7453
7526
  }
7454
7527
  }, [isRecording, phase, markStep, undoLastStep, previewSteps, approveNarration, redoNarration]);
7455
- (0, import_react16.useEffect)(() => {
7456
- if (!isRecording && steps.length === 0 && captureEvents.length === 0 && !capturedTranscript.trim()) {
7457
- clearPersistedRecordingSession();
7458
- return;
7459
- }
7460
- persistRecordingSession({
7461
- version: 1,
7462
- experienceType,
7463
- phase,
7464
- steps,
7465
- captureEvents,
7466
- capturedTranscript,
7467
- pendingClicks: pendingClicksRef.current,
7468
- selectedStepType
7469
- });
7470
- }, [captureEvents, capturedTranscript, experienceType, isRecording, phase, selectedStepType, steps]);
7471
7528
  return {
7472
7529
  phase,
7473
7530
  steps,
package/dist/index.mjs CHANGED
@@ -2670,7 +2670,8 @@ function sanitizePersistedRecordingSession(raw) {
2670
2670
  captureEvents: raw.captureEvents,
2671
2671
  capturedTranscript: typeof raw.capturedTranscript === "string" ? raw.capturedTranscript : "",
2672
2672
  pendingClicks: raw.pendingClicks,
2673
- selectedStepType: raw.selectedStepType
2673
+ selectedStepType: raw.selectedStepType,
2674
+ voiceCaptureEnabled: raw.voiceCaptureEnabled === true
2674
2675
  };
2675
2676
  }
2676
2677
  function readPersistedRecordingSession() {
@@ -6661,6 +6662,10 @@ function useRecordingMode({
6661
6662
  const stepsRef = useRef12([]);
6662
6663
  stepsRef.current = steps;
6663
6664
  const captureEventsRef = useRef12([]);
6665
+ const capturedTranscriptRef = useRef12(capturedTranscript);
6666
+ capturedTranscriptRef.current = capturedTranscript;
6667
+ const phaseRef = useRef12(phase);
6668
+ phaseRef.current = phase;
6664
6669
  const safeSpeak = useCallback10((text) => {
6665
6670
  void voice.speak(text).catch((err) => {
6666
6671
  console.warn("[Recording] Voice playback unavailable:", err);
@@ -6668,7 +6673,7 @@ function useRecordingMode({
6668
6673
  }, [voice]);
6669
6674
  captureEventsRef.current = captureEvents;
6670
6675
  const pendingClicksRef = useRef12(restoredSession?.pendingClicks ?? []);
6671
- const shouldKeepVoiceCaptureRef = useRef12(false);
6676
+ const shouldKeepVoiceCaptureRef = useRef12(restoredSession?.voiceCaptureEnabled === true);
6672
6677
  const resumeVoiceAfterNarrationRef = useRef12(false);
6673
6678
  const lastAutoNoteRef = useRef12("");
6674
6679
  const lastHoverKeyRef = useRef12("");
@@ -6678,19 +6683,43 @@ function useRecordingMode({
6678
6683
  const isRecording = phase !== "idle";
6679
6684
  const stepCount = steps.length;
6680
6685
  const captureEventCount = captureEvents.length;
6681
- const appendCaptureEvent = useCallback10((event) => {
6682
- setCaptureEvents((prev) => {
6683
- const nextEvent = {
6684
- id: event.id || newCaptureId(event.type),
6685
- order: prev.length,
6686
- timestamp: event.timestamp || (/* @__PURE__ */ new Date()).toISOString(),
6687
- ...event
6688
- };
6689
- return [...prev, nextEvent];
6686
+ const persistSnapshot = useCallback10((overrides) => {
6687
+ const nextPhase = overrides?.phase ?? phaseRef.current;
6688
+ const nextSteps = overrides?.steps ?? stepsRef.current;
6689
+ const nextCaptureEvents = overrides?.captureEvents ?? captureEventsRef.current;
6690
+ const nextTranscript = overrides?.capturedTranscript ?? capturedTranscriptRef.current;
6691
+ const nextSelectedStepType = overrides?.selectedStepType ?? selectedStepTypeRef.current;
6692
+ const nextVoiceCaptureEnabled = overrides?.voiceCaptureEnabled ?? shouldKeepVoiceCaptureRef.current;
6693
+ if (nextPhase === "idle" && nextSteps.length === 0 && nextCaptureEvents.length === 0 && !nextTranscript.trim()) {
6694
+ clearPersistedRecordingSession();
6695
+ return;
6696
+ }
6697
+ persistRecordingSession({
6698
+ version: 1,
6699
+ experienceType,
6700
+ phase: nextPhase,
6701
+ steps: nextSteps,
6702
+ captureEvents: nextCaptureEvents,
6703
+ capturedTranscript: nextTranscript,
6704
+ pendingClicks: pendingClicksRef.current,
6705
+ selectedStepType: nextSelectedStepType,
6706
+ voiceCaptureEnabled: nextVoiceCaptureEnabled
6690
6707
  });
6691
- }, []);
6708
+ }, [experienceType]);
6709
+ const appendCaptureEvent = useCallback10((event) => {
6710
+ const nextEvent = {
6711
+ id: event.id || newCaptureId(event.type),
6712
+ order: captureEventsRef.current.length,
6713
+ timestamp: event.timestamp || (/* @__PURE__ */ new Date()).toISOString(),
6714
+ ...event
6715
+ };
6716
+ const nextCaptureEvents = [...captureEventsRef.current, nextEvent];
6717
+ captureEventsRef.current = nextCaptureEvents;
6718
+ setCaptureEvents(nextCaptureEvents);
6719
+ persistSnapshot({ captureEvents: nextCaptureEvents });
6720
+ }, [persistSnapshot]);
6692
6721
  const updateCaptureEvent = useCallback10((id, metadataPatch) => {
6693
- setCaptureEvents((prev) => prev.map((event) => {
6722
+ const nextCaptureEvents = captureEventsRef.current.map((event) => {
6694
6723
  if (event.id === id) {
6695
6724
  return {
6696
6725
  ...event,
@@ -6701,14 +6730,19 @@ function useRecordingMode({
6701
6730
  };
6702
6731
  }
6703
6732
  return event;
6704
- }));
6705
- }, []);
6733
+ });
6734
+ captureEventsRef.current = nextCaptureEvents;
6735
+ setCaptureEvents(nextCaptureEvents);
6736
+ persistSnapshot({ captureEvents: nextCaptureEvents });
6737
+ }, [persistSnapshot]);
6706
6738
  const appendVoiceNote = useCallback10((transcript) => {
6707
6739
  const text = transcript.trim();
6708
6740
  if (!text) return;
6709
6741
  if (lastAutoNoteRef.current === text) return;
6710
6742
  lastAutoNoteRef.current = text;
6711
- setCapturedTranscript((prev) => prev ? `${prev} ${text}` : text);
6743
+ const nextTranscript = capturedTranscriptRef.current ? `${capturedTranscriptRef.current} ${text}` : text;
6744
+ capturedTranscriptRef.current = nextTranscript;
6745
+ setCapturedTranscript(nextTranscript);
6712
6746
  const recordedClicks = [...pendingClicksRef.current];
6713
6747
  const lastClick = recordedClicks[recordedClicks.length - 1];
6714
6748
  const newStep = {
@@ -6742,7 +6776,10 @@ function useRecordingMode({
6742
6776
  note: "voice_capture"
6743
6777
  }
6744
6778
  };
6745
- setSteps((prev) => [...prev, newStep]);
6779
+ const nextSteps = [...stepsRef.current, newStep];
6780
+ stepsRef.current = nextSteps;
6781
+ setSteps(nextSteps);
6782
+ persistSnapshot({ steps: nextSteps, capturedTranscript: nextTranscript });
6746
6783
  appendCaptureEvent({
6747
6784
  type: "note",
6748
6785
  url: getCurrentTourUrl(),
@@ -6758,7 +6795,7 @@ function useRecordingMode({
6758
6795
  stepOrder: newStep.order
6759
6796
  }
6760
6797
  });
6761
- }, [appendCaptureEvent]);
6798
+ }, [appendCaptureEvent, persistSnapshot]);
6762
6799
  const stopBackgroundVoiceCapture = useCallback10(() => {
6763
6800
  voice.stopListening();
6764
6801
  setIsVoiceCaptureActive(false);
@@ -6781,24 +6818,37 @@ function useRecordingMode({
6781
6818
  setIsVoiceCaptureActive(true);
6782
6819
  }, [appendVoiceNote, isRecording, phase, voice]);
6783
6820
  const startRecording = useCallback10(() => {
6784
- setPhase("active");
6785
- setSteps([]);
6786
- pendingClicksRef.current = [];
6787
- setCaptureEvents([{
6821
+ const nextPhase = "active";
6822
+ const nextCaptureEvents = [{
6788
6823
  id: newCaptureId("session_start"),
6789
6824
  order: 0,
6790
6825
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
6791
6826
  type: "session_start",
6792
6827
  url: getCurrentTourUrl(),
6793
6828
  metadata: { mode: "capture_v1" }
6794
- }]);
6829
+ }];
6830
+ phaseRef.current = nextPhase;
6831
+ setPhase(nextPhase);
6832
+ stepsRef.current = [];
6833
+ setSteps([]);
6834
+ pendingClicksRef.current = [];
6835
+ captureEventsRef.current = nextCaptureEvents;
6836
+ setCaptureEvents(nextCaptureEvents);
6837
+ capturedTranscriptRef.current = "";
6795
6838
  setCapturedTranscript("");
6796
6839
  setIsVoiceCaptureActive(false);
6797
6840
  shouldKeepVoiceCaptureRef.current = false;
6798
6841
  lastAutoNoteRef.current = "";
6799
6842
  lastHoverKeyRef.current = "";
6800
6843
  lastHoverAtRef.current = 0;
6801
- }, []);
6844
+ persistSnapshot({
6845
+ phase: nextPhase,
6846
+ steps: [],
6847
+ captureEvents: nextCaptureEvents,
6848
+ capturedTranscript: "",
6849
+ voiceCaptureEnabled: false
6850
+ });
6851
+ }, [persistSnapshot]);
6802
6852
  const markStep = useCallback10(() => {
6803
6853
  if (phase !== "active") return;
6804
6854
  setPhase("selecting");
@@ -6928,7 +6978,9 @@ function useRecordingMode({
6928
6978
  note: raw
6929
6979
  }
6930
6980
  };
6931
- setSteps((prev) => [...prev, newStep]);
6981
+ const nextSteps = [...stepsRef.current, newStep];
6982
+ stepsRef.current = nextSteps;
6983
+ setSteps(nextSteps);
6932
6984
  appendCaptureEvent({
6933
6985
  type: "note",
6934
6986
  url: getCurrentTourUrl(),
@@ -6947,14 +6999,16 @@ function useRecordingMode({
6947
6999
  setSelectedElement(null);
6948
7000
  setPendingNarration("");
6949
7001
  setPolishedNarration("");
7002
+ phaseRef.current = "active";
6950
7003
  setPhase("active");
7004
+ persistSnapshot({ steps: nextSteps, phase: "active" });
6951
7005
  safeSpeak(`Note ${newStep.order + 1} saved. Keep performing the workflow or add another note.`);
6952
7006
  if (resumeVoiceAfterNarrationRef.current || shouldKeepVoiceCaptureRef.current) {
6953
7007
  window.setTimeout(() => {
6954
7008
  if (shouldKeepVoiceCaptureRef.current) startBackgroundVoiceCapture();
6955
7009
  }, 0);
6956
7010
  }
6957
- }, [appendCaptureEvent, phase, polishedNarration, pendingNarration, selectedElement, selectedStepType, startBackgroundVoiceCapture, voice]);
7011
+ }, [appendCaptureEvent, phase, polishedNarration, pendingNarration, persistSnapshot, selectedElement, selectedStepType, startBackgroundVoiceCapture, voice]);
6958
7012
  const redoNarration = useCallback10(() => {
6959
7013
  if (phase !== "reviewing") return;
6960
7014
  setPendingNarration("");
@@ -6966,23 +7020,27 @@ function useRecordingMode({
6966
7020
  setPolishedNarration(text);
6967
7021
  }, []);
6968
7022
  const continueRecording = useCallback10(() => {
7023
+ phaseRef.current = "active";
6969
7024
  setPhase("active");
7025
+ persistSnapshot({ phase: "active" });
6970
7026
  if (shouldKeepVoiceCaptureRef.current) {
6971
7027
  window.setTimeout(() => {
6972
7028
  if (shouldKeepVoiceCaptureRef.current) startBackgroundVoiceCapture();
6973
7029
  }, 0);
6974
7030
  }
6975
- }, [startBackgroundVoiceCapture]);
7031
+ }, [persistSnapshot, startBackgroundVoiceCapture]);
6976
7032
  const undoLastStep = useCallback10(() => {
6977
- setSteps((prev) => {
6978
- const next = prev.slice(0, -1);
6979
- safeSpeak(
6980
- next.length > 0 ? `Step ${prev.length} removed. ${next.length} step${next.length !== 1 ? "s" : ""} remaining.` : "Step removed. No steps recorded yet."
6981
- );
6982
- return next;
6983
- });
7033
+ const previous = stepsRef.current;
7034
+ const nextSteps = previous.slice(0, -1);
7035
+ stepsRef.current = nextSteps;
7036
+ setSteps(nextSteps);
7037
+ safeSpeak(
7038
+ nextSteps.length > 0 ? `Step ${previous.length} removed. ${nextSteps.length} step${nextSteps.length !== 1 ? "s" : ""} remaining.` : "Step removed. No steps recorded yet."
7039
+ );
7040
+ phaseRef.current = "active";
6984
7041
  setPhase("active");
6985
- }, [voice]);
7042
+ persistSnapshot({ steps: nextSteps, phase: "active" });
7043
+ }, [persistSnapshot, voice]);
6986
7044
  const previewSteps = useCallback10(() => {
6987
7045
  if (steps.length === 0) {
6988
7046
  safeSpeak("No steps recorded yet.");
@@ -6995,18 +7053,24 @@ function useRecordingMode({
6995
7053
  shouldKeepVoiceCaptureRef.current = false;
6996
7054
  resumeVoiceAfterNarrationRef.current = false;
6997
7055
  stopBackgroundVoiceCapture();
6998
- }, [stopBackgroundVoiceCapture]);
7056
+ persistSnapshot({ voiceCaptureEnabled: false });
7057
+ }, [persistSnapshot, stopBackgroundVoiceCapture]);
6999
7058
  const cancelRecording = useCallback10(() => {
7000
7059
  shouldKeepVoiceCaptureRef.current = false;
7001
7060
  stopBackgroundVoiceCapture();
7061
+ phaseRef.current = "idle";
7002
7062
  setPhase("idle");
7063
+ stepsRef.current = [];
7003
7064
  setSteps([]);
7065
+ captureEventsRef.current = [];
7004
7066
  setCaptureEvents([]);
7067
+ capturedTranscriptRef.current = "";
7005
7068
  setCapturedTranscript("");
7006
7069
  pendingClicksRef.current = [];
7007
7070
  setSelectedElement(null);
7008
7071
  setPendingNarration("");
7009
7072
  setPolishedNarration("");
7073
+ clearPersistedRecordingSession();
7010
7074
  safeSpeak("Recording cancelled.");
7011
7075
  }, [safeSpeak, stopBackgroundVoiceCapture]);
7012
7076
  useEffect15(() => {
@@ -7050,22 +7114,20 @@ function useRecordingMode({
7050
7114
  }
7051
7115
  const eventId = newCaptureId("click");
7052
7116
  const timestamp = (/* @__PURE__ */ new Date()).toISOString();
7117
+ appendCaptureEvent({
7118
+ id: eventId,
7119
+ timestamp,
7120
+ type: "click",
7121
+ url: click.url,
7122
+ label: click.label,
7123
+ tagName: click.tagName,
7124
+ testId: click.testId,
7125
+ fingerprint: click.fingerprint,
7126
+ textContaining: click.textContaining,
7127
+ metadata: buildCaptureEventMetadata(interactiveTarget, "click")
7128
+ });
7053
7129
  void captureScreenshot().then((beforeScreenshot) => {
7054
- appendCaptureEvent({
7055
- id: eventId,
7056
- timestamp,
7057
- type: "click",
7058
- url: click.url,
7059
- label: click.label,
7060
- tagName: click.tagName,
7061
- testId: click.testId,
7062
- fingerprint: click.fingerprint,
7063
- textContaining: click.textContaining,
7064
- metadata: {
7065
- ...buildCaptureEventMetadata(interactiveTarget, "click"),
7066
- beforeScreenshot
7067
- }
7068
- });
7130
+ updateCaptureEvent(eventId, { beforeScreenshot });
7069
7131
  window.setTimeout(async () => {
7070
7132
  try {
7071
7133
  const afterScreenshot = await captureScreenshot();
@@ -7076,18 +7138,6 @@ function useRecordingMode({
7076
7138
  }, 1200);
7077
7139
  }).catch((err) => {
7078
7140
  console.warn("[Recording] Failed to capture before-click screenshot:", err);
7079
- appendCaptureEvent({
7080
- id: eventId,
7081
- timestamp,
7082
- type: "click",
7083
- url: click.url,
7084
- label: click.label,
7085
- tagName: click.tagName,
7086
- testId: click.testId,
7087
- fingerprint: click.fingerprint,
7088
- textContaining: click.textContaining,
7089
- metadata: buildCaptureEventMetadata(interactiveTarget, "click")
7090
- });
7091
7141
  });
7092
7142
  };
7093
7143
  const handleInputEvent = (event) => {
@@ -7133,10 +7183,24 @@ function useRecordingMode({
7133
7183
  window.clearInterval(routePoll);
7134
7184
  };
7135
7185
  }, [appendCaptureEvent, isRecording]);
7186
useEffect15(() => {
  if (!isRecording || typeof window === "undefined") return;
  // Persist the in-progress recording snapshot when the page is about to
  // go away, so a reload or navigation can restore the session.
  const flush = () => {
    persistSnapshot();
  };
  // "pagehide" covers bfcache navigations; "beforeunload" covers full unloads.
  const teardownEvents = ["pagehide", "beforeunload"];
  for (const name of teardownEvents) {
    window.addEventListener(name, flush);
  }
  return () => {
    for (const name of teardownEvents) {
      window.removeEventListener(name, flush);
    }
  };
}, [isRecording, persistSnapshot]);
7136
7198
  const stopRecording = useCallback10(async (tourName, targetUserTypes) => {
7199
+ phaseRef.current = "finishing";
7137
7200
  setPhase("finishing");
7138
7201
  shouldKeepVoiceCaptureRef.current = false;
7139
7202
  stopBackgroundVoiceCapture();
7203
+ persistSnapshot({ phase: "finishing", voiceCaptureEnabled: false });
7140
7204
  if (steps.length === 0 && captureEventsRef.current.length <= 1) {
7141
7205
  safeSpeak("No workflow captured yet.");
7142
7206
  setPhase("idle");
@@ -7182,11 +7246,16 @@ function useRecordingMode({
7182
7246
  throw new Error(errorMessage);
7183
7247
  }
7184
7248
  const data = await res.json();
7249
+ stepsRef.current = [];
7185
7250
  setSteps([]);
7251
+ captureEventsRef.current = [];
7186
7252
  setCaptureEvents([]);
7253
+ capturedTranscriptRef.current = "";
7187
7254
  setCapturedTranscript("");
7188
7255
  pendingClicksRef.current = [];
7256
+ phaseRef.current = "idle";
7189
7257
  setPhase("idle");
7258
+ clearPersistedRecordingSession();
7190
7259
  const savedStepCount = Array.isArray(data?.tour?.steps) ? data.tour.steps.length : 0;
7191
7260
  const itemLabel = experienceType === "onboarding" ? "workflow" : "tour";
7192
7261
  safeSpeak(`${itemLabel === "tour" ? "Tour" : "Workflow"} "${tourName}" saved as a draft with ${savedStepCount} generated steps. Review and preview it before publishing.`);
@@ -7194,19 +7263,23 @@ function useRecordingMode({
7194
7263
  } catch (err) {
7195
7264
  console.error("[Recording] Failed to save tour:", err);
7196
7265
  safeSpeak(`Failed to save the ${experienceType === "onboarding" ? "workflow" : "tour"}. Please try again.`);
7266
+ phaseRef.current = "active";
7197
7267
  setPhase("active");
7268
+ persistSnapshot({ phase: "active" });
7198
7269
  return null;
7199
7270
  }
7200
- }, [capturedTranscript, experienceType, safeSpeak, serverUrl, stopBackgroundVoiceCapture, steps, toursApiBase, websiteId]);
7271
+ }, [capturedTranscript, experienceType, persistSnapshot, safeSpeak, serverUrl, stopBackgroundVoiceCapture, steps, toursApiBase, websiteId]);
7201
7272
const toggleVoiceCapture = useCallback10(() => {
  // Treat capture as "on" if it is actively running OR merely requested to
  // persist across navigations (the keep-alive ref).
  const captureIsOn = isVoiceCaptureActive || shouldKeepVoiceCaptureRef.current;
  if (!captureIsOn) {
    // Turning on: record the intent, persist it, then start capturing.
    shouldKeepVoiceCaptureRef.current = true;
    persistSnapshot({ voiceCaptureEnabled: true });
    startBackgroundVoiceCapture();
    return;
  }
  // Turning off: clear the intent, stop capture, then persist the new state.
  shouldKeepVoiceCaptureRef.current = false;
  stopBackgroundVoiceCapture();
  persistSnapshot({ voiceCaptureEnabled: false });
}, [isVoiceCaptureActive, persistSnapshot, startBackgroundVoiceCapture, stopBackgroundVoiceCapture]);
7210
7283
  useEffect15(() => {
7211
7284
  if (!isRecording) return;
7212
7285
  if (phase === "active" && shouldKeepVoiceCaptureRef.current && !isVoiceCaptureActive) {
@@ -7242,22 +7315,6 @@ function useRecordingMode({
7242
7315
  }
7243
7316
  }
7244
7317
  }, [isRecording, phase, markStep, undoLastStep, previewSteps, approveNarration, redoNarration]);
7245
- useEffect15(() => {
7246
- if (!isRecording && steps.length === 0 && captureEvents.length === 0 && !capturedTranscript.trim()) {
7247
- clearPersistedRecordingSession();
7248
- return;
7249
- }
7250
- persistRecordingSession({
7251
- version: 1,
7252
- experienceType,
7253
- phase,
7254
- steps,
7255
- captureEvents,
7256
- capturedTranscript,
7257
- pendingClicks: pendingClicksRef.current,
7258
- selectedStepType
7259
- });
7260
- }, [captureEvents, capturedTranscript, experienceType, isRecording, phase, selectedStepType, steps]);
7261
7318
  return {
7262
7319
  phase,
7263
7320
  steps,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@modelnex/sdk",
3
- "version": "0.5.33",
3
+ "version": "0.5.34",
4
4
  "description": "React SDK for natural language control of web apps via AI agents",
5
5
  "main": "./dist/index.js",
6
6
  "module": "./dist/index.mjs",