@modelnex/sdk 0.5.32 → 0.5.34

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/dist/index.js +248 -72
  2. package/dist/index.mjs +248 -72
  3. package/package.json +1 -1
package/dist/index.js CHANGED
@@ -2832,6 +2832,92 @@ async function validateInjectedDevModeKey(serverUrl, websiteId, devModeKey) {
2832
2832
  return validationPromise;
2833
2833
  }
2834
2834
 
2835
+ // src/utils/recordingPersistence.ts
2836
+ var RECORDING_SESSION_STORAGE_KEY = "modelnex-recording-session";
2837
+ var RESTORABLE_RECORDING_PHASES = /* @__PURE__ */ new Set([
2838
+ "active",
2839
+ "selecting",
2840
+ "configuring",
2841
+ "narrating",
2842
+ "reviewing",
2843
+ "saved",
2844
+ "finishing"
2845
+ ]);
2846
+ var TOUR_STEP_TYPES = /* @__PURE__ */ new Set([
2847
+ "narrate",
2848
+ "act",
2849
+ "input",
2850
+ "ask_and_fill",
2851
+ "user_input",
2852
+ "ask_or_fill",
2853
+ "wait_then_act"
2854
+ ]);
2855
+ function canUseSessionStorage() {
2856
+ return typeof window !== "undefined" && typeof window.sessionStorage !== "undefined";
2857
+ }
2858
+ function isExperienceType(value) {
2859
+ return value === "tour" || value === "onboarding";
2860
+ }
2861
+ function isTourStepType(value) {
2862
+ return typeof value === "string" && TOUR_STEP_TYPES.has(value);
2863
+ }
2864
+ function isPlainObject(value) {
2865
+ return Boolean(value) && typeof value === "object" && !Array.isArray(value);
2866
+ }
2867
+ function sanitizePersistedRecordingSession(raw) {
2868
+ if (!isPlainObject(raw)) return null;
2869
+ if (raw.version !== 1) return null;
2870
+ if (!isExperienceType(raw.experienceType)) return null;
2871
+ if (typeof raw.phase !== "string" || !RESTORABLE_RECORDING_PHASES.has(raw.phase)) return null;
2872
+ if (!Array.isArray(raw.steps) || !Array.isArray(raw.captureEvents) || !Array.isArray(raw.pendingClicks)) return null;
2873
+ if (!isTourStepType(raw.selectedStepType)) return null;
2874
+ return {
2875
+ version: 1,
2876
+ experienceType: raw.experienceType,
2877
+ phase: raw.phase,
2878
+ steps: raw.steps,
2879
+ captureEvents: raw.captureEvents,
2880
+ capturedTranscript: typeof raw.capturedTranscript === "string" ? raw.capturedTranscript : "",
2881
+ pendingClicks: raw.pendingClicks,
2882
+ selectedStepType: raw.selectedStepType,
2883
+ voiceCaptureEnabled: raw.voiceCaptureEnabled === true
2884
+ };
2885
+ }
2886
+ function readPersistedRecordingSession() {
2887
+ if (!canUseSessionStorage()) return null;
2888
+ try {
2889
+ const raw = window.sessionStorage.getItem(RECORDING_SESSION_STORAGE_KEY);
2890
+ if (!raw) return null;
2891
+ const parsed = sanitizePersistedRecordingSession(JSON.parse(raw));
2892
+ if (parsed) {
2893
+ return parsed;
2894
+ }
2895
+ } catch {
2896
+ }
2897
+ clearPersistedRecordingSession();
2898
+ return null;
2899
+ }
2900
+ function readPersistedRecordingExperienceType() {
2901
+ return readPersistedRecordingSession()?.experienceType ?? null;
2902
+ }
2903
+ function hasPersistedRecordingSession() {
2904
+ return readPersistedRecordingSession() !== null;
2905
+ }
2906
+ function persistRecordingSession(session) {
2907
+ if (!canUseSessionStorage()) return;
2908
+ try {
2909
+ window.sessionStorage.setItem(RECORDING_SESSION_STORAGE_KEY, JSON.stringify(session));
2910
+ } catch {
2911
+ }
2912
+ }
2913
+ function clearPersistedRecordingSession() {
2914
+ if (!canUseSessionStorage()) return;
2915
+ try {
2916
+ window.sessionStorage.removeItem(RECORDING_SESSION_STORAGE_KEY);
2917
+ } catch {
2918
+ }
2919
+ }
2920
+
2835
2921
  // src/hooks/useRunCommand.ts
2836
2922
  var import_react9 = require("react");
2837
2923
  function searchTaggedElementsForQuery(store, query, limit = 8) {
@@ -6768,26 +6854,36 @@ function useRecordingMode({
6768
6854
  onPreview,
6769
6855
  experienceType = "tour"
6770
6856
  }) {
6771
- const [phase, setPhase] = (0, import_react16.useState)("idle");
6772
- const [steps, setSteps] = (0, import_react16.useState)([]);
6857
+ const restoredSessionRef = (0, import_react16.useRef)(void 0);
6858
+ if (restoredSessionRef.current === void 0) {
6859
+ restoredSessionRef.current = readPersistedRecordingSession();
6860
+ }
6861
+ const restoredSession = restoredSessionRef.current;
6862
+ const restoredPhase = restoredSession ? "active" : "idle";
6863
+ const [phase, setPhase] = (0, import_react16.useState)(restoredPhase);
6864
+ const [steps, setSteps] = (0, import_react16.useState)(() => restoredSession?.steps ?? []);
6773
6865
  const [selectedElement, setSelectedElement] = (0, import_react16.useState)(null);
6774
- const [selectedStepType, setSelectedStepType] = (0, import_react16.useState)("ask_or_fill");
6866
+ const [selectedStepType, setSelectedStepType] = (0, import_react16.useState)(() => restoredSession?.selectedStepType ?? "ask_or_fill");
6775
6867
  const [pendingNarration, setPendingNarration] = (0, import_react16.useState)("");
6776
6868
  const [polishedNarration, setPolishedNarration] = (0, import_react16.useState)("");
6777
- const [captureEvents, setCaptureEvents] = (0, import_react16.useState)([]);
6778
- const [capturedTranscript, setCapturedTranscript] = (0, import_react16.useState)("");
6869
+ const [captureEvents, setCaptureEvents] = (0, import_react16.useState)(() => restoredSession?.captureEvents ?? []);
6870
+ const [capturedTranscript, setCapturedTranscript] = (0, import_react16.useState)(() => restoredSession?.capturedTranscript ?? "");
6779
6871
  const [isVoiceCaptureActive, setIsVoiceCaptureActive] = (0, import_react16.useState)(false);
6780
6872
  const stepsRef = (0, import_react16.useRef)([]);
6781
6873
  stepsRef.current = steps;
6782
6874
  const captureEventsRef = (0, import_react16.useRef)([]);
6875
+ const capturedTranscriptRef = (0, import_react16.useRef)(capturedTranscript);
6876
+ capturedTranscriptRef.current = capturedTranscript;
6877
+ const phaseRef = (0, import_react16.useRef)(phase);
6878
+ phaseRef.current = phase;
6783
6879
  const safeSpeak = (0, import_react16.useCallback)((text) => {
6784
6880
  void voice.speak(text).catch((err) => {
6785
6881
  console.warn("[Recording] Voice playback unavailable:", err);
6786
6882
  });
6787
6883
  }, [voice]);
6788
6884
  captureEventsRef.current = captureEvents;
6789
- const pendingClicksRef = (0, import_react16.useRef)([]);
6790
- const shouldKeepVoiceCaptureRef = (0, import_react16.useRef)(false);
6885
+ const pendingClicksRef = (0, import_react16.useRef)(restoredSession?.pendingClicks ?? []);
6886
+ const shouldKeepVoiceCaptureRef = (0, import_react16.useRef)(restoredSession?.voiceCaptureEnabled === true);
6791
6887
  const resumeVoiceAfterNarrationRef = (0, import_react16.useRef)(false);
6792
6888
  const lastAutoNoteRef = (0, import_react16.useRef)("");
6793
6889
  const lastHoverKeyRef = (0, import_react16.useRef)("");
@@ -6797,19 +6893,43 @@ function useRecordingMode({
6797
6893
  const isRecording = phase !== "idle";
6798
6894
  const stepCount = steps.length;
6799
6895
  const captureEventCount = captureEvents.length;
6800
- const appendCaptureEvent = (0, import_react16.useCallback)((event) => {
6801
- setCaptureEvents((prev) => {
6802
- const nextEvent = {
6803
- id: event.id || newCaptureId(event.type),
6804
- order: prev.length,
6805
- timestamp: event.timestamp || (/* @__PURE__ */ new Date()).toISOString(),
6806
- ...event
6807
- };
6808
- return [...prev, nextEvent];
6896
+ const persistSnapshot = (0, import_react16.useCallback)((overrides) => {
6897
+ const nextPhase = overrides?.phase ?? phaseRef.current;
6898
+ const nextSteps = overrides?.steps ?? stepsRef.current;
6899
+ const nextCaptureEvents = overrides?.captureEvents ?? captureEventsRef.current;
6900
+ const nextTranscript = overrides?.capturedTranscript ?? capturedTranscriptRef.current;
6901
+ const nextSelectedStepType = overrides?.selectedStepType ?? selectedStepTypeRef.current;
6902
+ const nextVoiceCaptureEnabled = overrides?.voiceCaptureEnabled ?? shouldKeepVoiceCaptureRef.current;
6903
+ if (nextPhase === "idle" && nextSteps.length === 0 && nextCaptureEvents.length === 0 && !nextTranscript.trim()) {
6904
+ clearPersistedRecordingSession();
6905
+ return;
6906
+ }
6907
+ persistRecordingSession({
6908
+ version: 1,
6909
+ experienceType,
6910
+ phase: nextPhase,
6911
+ steps: nextSteps,
6912
+ captureEvents: nextCaptureEvents,
6913
+ capturedTranscript: nextTranscript,
6914
+ pendingClicks: pendingClicksRef.current,
6915
+ selectedStepType: nextSelectedStepType,
6916
+ voiceCaptureEnabled: nextVoiceCaptureEnabled
6809
6917
  });
6810
- }, []);
6918
+ }, [experienceType]);
6919
+ const appendCaptureEvent = (0, import_react16.useCallback)((event) => {
6920
+ const nextEvent = {
6921
+ id: event.id || newCaptureId(event.type),
6922
+ order: captureEventsRef.current.length,
6923
+ timestamp: event.timestamp || (/* @__PURE__ */ new Date()).toISOString(),
6924
+ ...event
6925
+ };
6926
+ const nextCaptureEvents = [...captureEventsRef.current, nextEvent];
6927
+ captureEventsRef.current = nextCaptureEvents;
6928
+ setCaptureEvents(nextCaptureEvents);
6929
+ persistSnapshot({ captureEvents: nextCaptureEvents });
6930
+ }, [persistSnapshot]);
6811
6931
  const updateCaptureEvent = (0, import_react16.useCallback)((id, metadataPatch) => {
6812
- setCaptureEvents((prev) => prev.map((event) => {
6932
+ const nextCaptureEvents = captureEventsRef.current.map((event) => {
6813
6933
  if (event.id === id) {
6814
6934
  return {
6815
6935
  ...event,
@@ -6820,14 +6940,19 @@ function useRecordingMode({
6820
6940
  };
6821
6941
  }
6822
6942
  return event;
6823
- }));
6824
- }, []);
6943
+ });
6944
+ captureEventsRef.current = nextCaptureEvents;
6945
+ setCaptureEvents(nextCaptureEvents);
6946
+ persistSnapshot({ captureEvents: nextCaptureEvents });
6947
+ }, [persistSnapshot]);
6825
6948
  const appendVoiceNote = (0, import_react16.useCallback)((transcript) => {
6826
6949
  const text = transcript.trim();
6827
6950
  if (!text) return;
6828
6951
  if (lastAutoNoteRef.current === text) return;
6829
6952
  lastAutoNoteRef.current = text;
6830
- setCapturedTranscript((prev) => prev ? `${prev} ${text}` : text);
6953
+ const nextTranscript = capturedTranscriptRef.current ? `${capturedTranscriptRef.current} ${text}` : text;
6954
+ capturedTranscriptRef.current = nextTranscript;
6955
+ setCapturedTranscript(nextTranscript);
6831
6956
  const recordedClicks = [...pendingClicksRef.current];
6832
6957
  const lastClick = recordedClicks[recordedClicks.length - 1];
6833
6958
  const newStep = {
@@ -6861,7 +6986,10 @@ function useRecordingMode({
6861
6986
  note: "voice_capture"
6862
6987
  }
6863
6988
  };
6864
- setSteps((prev) => [...prev, newStep]);
6989
+ const nextSteps = [...stepsRef.current, newStep];
6990
+ stepsRef.current = nextSteps;
6991
+ setSteps(nextSteps);
6992
+ persistSnapshot({ steps: nextSteps, capturedTranscript: nextTranscript });
6865
6993
  appendCaptureEvent({
6866
6994
  type: "note",
6867
6995
  url: getCurrentTourUrl(),
@@ -6877,7 +7005,7 @@ function useRecordingMode({
6877
7005
  stepOrder: newStep.order
6878
7006
  }
6879
7007
  });
6880
- }, [appendCaptureEvent]);
7008
+ }, [appendCaptureEvent, persistSnapshot]);
6881
7009
  const stopBackgroundVoiceCapture = (0, import_react16.useCallback)(() => {
6882
7010
  voice.stopListening();
6883
7011
  setIsVoiceCaptureActive(false);
@@ -6900,24 +7028,37 @@ function useRecordingMode({
6900
7028
  setIsVoiceCaptureActive(true);
6901
7029
  }, [appendVoiceNote, isRecording, phase, voice]);
6902
7030
  const startRecording = (0, import_react16.useCallback)(() => {
6903
- setPhase("active");
6904
- setSteps([]);
6905
- pendingClicksRef.current = [];
6906
- setCaptureEvents([{
7031
+ const nextPhase = "active";
7032
+ const nextCaptureEvents = [{
6907
7033
  id: newCaptureId("session_start"),
6908
7034
  order: 0,
6909
7035
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
6910
7036
  type: "session_start",
6911
7037
  url: getCurrentTourUrl(),
6912
7038
  metadata: { mode: "capture_v1" }
6913
- }]);
7039
+ }];
7040
+ phaseRef.current = nextPhase;
7041
+ setPhase(nextPhase);
7042
+ stepsRef.current = [];
7043
+ setSteps([]);
7044
+ pendingClicksRef.current = [];
7045
+ captureEventsRef.current = nextCaptureEvents;
7046
+ setCaptureEvents(nextCaptureEvents);
7047
+ capturedTranscriptRef.current = "";
6914
7048
  setCapturedTranscript("");
6915
7049
  setIsVoiceCaptureActive(false);
6916
7050
  shouldKeepVoiceCaptureRef.current = false;
6917
7051
  lastAutoNoteRef.current = "";
6918
7052
  lastHoverKeyRef.current = "";
6919
7053
  lastHoverAtRef.current = 0;
6920
- }, []);
7054
+ persistSnapshot({
7055
+ phase: nextPhase,
7056
+ steps: [],
7057
+ captureEvents: nextCaptureEvents,
7058
+ capturedTranscript: "",
7059
+ voiceCaptureEnabled: false
7060
+ });
7061
+ }, [persistSnapshot]);
6921
7062
  const markStep = (0, import_react16.useCallback)(() => {
6922
7063
  if (phase !== "active") return;
6923
7064
  setPhase("selecting");
@@ -7047,7 +7188,9 @@ function useRecordingMode({
7047
7188
  note: raw
7048
7189
  }
7049
7190
  };
7050
- setSteps((prev) => [...prev, newStep]);
7191
+ const nextSteps = [...stepsRef.current, newStep];
7192
+ stepsRef.current = nextSteps;
7193
+ setSteps(nextSteps);
7051
7194
  appendCaptureEvent({
7052
7195
  type: "note",
7053
7196
  url: getCurrentTourUrl(),
@@ -7066,14 +7209,16 @@ function useRecordingMode({
7066
7209
  setSelectedElement(null);
7067
7210
  setPendingNarration("");
7068
7211
  setPolishedNarration("");
7212
+ phaseRef.current = "active";
7069
7213
  setPhase("active");
7214
+ persistSnapshot({ steps: nextSteps, phase: "active" });
7070
7215
  safeSpeak(`Note ${newStep.order + 1} saved. Keep performing the workflow or add another note.`);
7071
7216
  if (resumeVoiceAfterNarrationRef.current || shouldKeepVoiceCaptureRef.current) {
7072
7217
  window.setTimeout(() => {
7073
7218
  if (shouldKeepVoiceCaptureRef.current) startBackgroundVoiceCapture();
7074
7219
  }, 0);
7075
7220
  }
7076
- }, [appendCaptureEvent, phase, polishedNarration, pendingNarration, selectedElement, selectedStepType, startBackgroundVoiceCapture, voice]);
7221
+ }, [appendCaptureEvent, phase, polishedNarration, pendingNarration, persistSnapshot, selectedElement, selectedStepType, startBackgroundVoiceCapture, voice]);
7077
7222
  const redoNarration = (0, import_react16.useCallback)(() => {
7078
7223
  if (phase !== "reviewing") return;
7079
7224
  setPendingNarration("");
@@ -7085,23 +7230,27 @@ function useRecordingMode({
7085
7230
  setPolishedNarration(text);
7086
7231
  }, []);
7087
7232
  const continueRecording = (0, import_react16.useCallback)(() => {
7233
+ phaseRef.current = "active";
7088
7234
  setPhase("active");
7235
+ persistSnapshot({ phase: "active" });
7089
7236
  if (shouldKeepVoiceCaptureRef.current) {
7090
7237
  window.setTimeout(() => {
7091
7238
  if (shouldKeepVoiceCaptureRef.current) startBackgroundVoiceCapture();
7092
7239
  }, 0);
7093
7240
  }
7094
- }, [startBackgroundVoiceCapture]);
7241
+ }, [persistSnapshot, startBackgroundVoiceCapture]);
7095
7242
  const undoLastStep = (0, import_react16.useCallback)(() => {
7096
- setSteps((prev) => {
7097
- const next = prev.slice(0, -1);
7098
- safeSpeak(
7099
- next.length > 0 ? `Step ${prev.length} removed. ${next.length} step${next.length !== 1 ? "s" : ""} remaining.` : "Step removed. No steps recorded yet."
7100
- );
7101
- return next;
7102
- });
7243
+ const previous = stepsRef.current;
7244
+ const nextSteps = previous.slice(0, -1);
7245
+ stepsRef.current = nextSteps;
7246
+ setSteps(nextSteps);
7247
+ safeSpeak(
7248
+ nextSteps.length > 0 ? `Step ${previous.length} removed. ${nextSteps.length} step${nextSteps.length !== 1 ? "s" : ""} remaining.` : "Step removed. No steps recorded yet."
7249
+ );
7250
+ phaseRef.current = "active";
7103
7251
  setPhase("active");
7104
- }, [voice]);
7252
+ persistSnapshot({ steps: nextSteps, phase: "active" });
7253
+ }, [persistSnapshot, voice]);
7105
7254
  const previewSteps = (0, import_react16.useCallback)(() => {
7106
7255
  if (steps.length === 0) {
7107
7256
  safeSpeak("No steps recorded yet.");
@@ -7114,18 +7263,24 @@ function useRecordingMode({
7114
7263
  shouldKeepVoiceCaptureRef.current = false;
7115
7264
  resumeVoiceAfterNarrationRef.current = false;
7116
7265
  stopBackgroundVoiceCapture();
7117
- }, [stopBackgroundVoiceCapture]);
7266
+ persistSnapshot({ voiceCaptureEnabled: false });
7267
+ }, [persistSnapshot, stopBackgroundVoiceCapture]);
7118
7268
  const cancelRecording = (0, import_react16.useCallback)(() => {
7119
7269
  shouldKeepVoiceCaptureRef.current = false;
7120
7270
  stopBackgroundVoiceCapture();
7271
+ phaseRef.current = "idle";
7121
7272
  setPhase("idle");
7273
+ stepsRef.current = [];
7122
7274
  setSteps([]);
7275
+ captureEventsRef.current = [];
7123
7276
  setCaptureEvents([]);
7277
+ capturedTranscriptRef.current = "";
7124
7278
  setCapturedTranscript("");
7125
7279
  pendingClicksRef.current = [];
7126
7280
  setSelectedElement(null);
7127
7281
  setPendingNarration("");
7128
7282
  setPolishedNarration("");
7283
+ clearPersistedRecordingSession();
7129
7284
  safeSpeak("Recording cancelled.");
7130
7285
  }, [safeSpeak, stopBackgroundVoiceCapture]);
7131
7286
  (0, import_react16.useEffect)(() => {
@@ -7169,22 +7324,20 @@ function useRecordingMode({
7169
7324
  }
7170
7325
  const eventId = newCaptureId("click");
7171
7326
  const timestamp = (/* @__PURE__ */ new Date()).toISOString();
7327
+ appendCaptureEvent({
7328
+ id: eventId,
7329
+ timestamp,
7330
+ type: "click",
7331
+ url: click.url,
7332
+ label: click.label,
7333
+ tagName: click.tagName,
7334
+ testId: click.testId,
7335
+ fingerprint: click.fingerprint,
7336
+ textContaining: click.textContaining,
7337
+ metadata: buildCaptureEventMetadata(interactiveTarget, "click")
7338
+ });
7172
7339
  void captureScreenshot().then((beforeScreenshot) => {
7173
- appendCaptureEvent({
7174
- id: eventId,
7175
- timestamp,
7176
- type: "click",
7177
- url: click.url,
7178
- label: click.label,
7179
- tagName: click.tagName,
7180
- testId: click.testId,
7181
- fingerprint: click.fingerprint,
7182
- textContaining: click.textContaining,
7183
- metadata: {
7184
- ...buildCaptureEventMetadata(interactiveTarget, "click"),
7185
- beforeScreenshot
7186
- }
7187
- });
7340
+ updateCaptureEvent(eventId, { beforeScreenshot });
7188
7341
  window.setTimeout(async () => {
7189
7342
  try {
7190
7343
  const afterScreenshot = await captureScreenshot();
@@ -7195,18 +7348,6 @@ function useRecordingMode({
7195
7348
  }, 1200);
7196
7349
  }).catch((err) => {
7197
7350
  console.warn("[Recording] Failed to capture before-click screenshot:", err);
7198
- appendCaptureEvent({
7199
- id: eventId,
7200
- timestamp,
7201
- type: "click",
7202
- url: click.url,
7203
- label: click.label,
7204
- tagName: click.tagName,
7205
- testId: click.testId,
7206
- fingerprint: click.fingerprint,
7207
- textContaining: click.textContaining,
7208
- metadata: buildCaptureEventMetadata(interactiveTarget, "click")
7209
- });
7210
7351
  });
7211
7352
  };
7212
7353
  const handleInputEvent = (event) => {
@@ -7252,10 +7393,24 @@ function useRecordingMode({
7252
7393
  window.clearInterval(routePoll);
7253
7394
  };
7254
7395
  }, [appendCaptureEvent, isRecording]);
7396
+ (0, import_react16.useEffect)(() => {
7397
+ if (!isRecording || typeof window === "undefined") return;
7398
+ const flushRecordingSession = () => {
7399
+ persistSnapshot();
7400
+ };
7401
+ window.addEventListener("pagehide", flushRecordingSession);
7402
+ window.addEventListener("beforeunload", flushRecordingSession);
7403
+ return () => {
7404
+ window.removeEventListener("pagehide", flushRecordingSession);
7405
+ window.removeEventListener("beforeunload", flushRecordingSession);
7406
+ };
7407
+ }, [isRecording, persistSnapshot]);
7255
7408
  const stopRecording = (0, import_react16.useCallback)(async (tourName, targetUserTypes) => {
7409
+ phaseRef.current = "finishing";
7256
7410
  setPhase("finishing");
7257
7411
  shouldKeepVoiceCaptureRef.current = false;
7258
7412
  stopBackgroundVoiceCapture();
7413
+ persistSnapshot({ phase: "finishing", voiceCaptureEnabled: false });
7259
7414
  if (steps.length === 0 && captureEventsRef.current.length <= 1) {
7260
7415
  safeSpeak("No workflow captured yet.");
7261
7416
  setPhase("idle");
@@ -7301,11 +7456,16 @@ function useRecordingMode({
7301
7456
  throw new Error(errorMessage);
7302
7457
  }
7303
7458
  const data = await res.json();
7459
+ stepsRef.current = [];
7304
7460
  setSteps([]);
7461
+ captureEventsRef.current = [];
7305
7462
  setCaptureEvents([]);
7463
+ capturedTranscriptRef.current = "";
7306
7464
  setCapturedTranscript("");
7307
7465
  pendingClicksRef.current = [];
7466
+ phaseRef.current = "idle";
7308
7467
  setPhase("idle");
7468
+ clearPersistedRecordingSession();
7309
7469
  const savedStepCount = Array.isArray(data?.tour?.steps) ? data.tour.steps.length : 0;
7310
7470
  const itemLabel = experienceType === "onboarding" ? "workflow" : "tour";
7311
7471
  safeSpeak(`${itemLabel === "tour" ? "Tour" : "Workflow"} "${tourName}" saved as a draft with ${savedStepCount} generated steps. Review and preview it before publishing.`);
@@ -7313,19 +7473,23 @@ function useRecordingMode({
7313
7473
  } catch (err) {
7314
7474
  console.error("[Recording] Failed to save tour:", err);
7315
7475
  safeSpeak(`Failed to save the ${experienceType === "onboarding" ? "workflow" : "tour"}. Please try again.`);
7476
+ phaseRef.current = "active";
7316
7477
  setPhase("active");
7478
+ persistSnapshot({ phase: "active" });
7317
7479
  return null;
7318
7480
  }
7319
- }, [capturedTranscript, experienceType, safeSpeak, serverUrl, stopBackgroundVoiceCapture, steps, toursApiBase, websiteId]);
7481
+ }, [capturedTranscript, experienceType, persistSnapshot, safeSpeak, serverUrl, stopBackgroundVoiceCapture, steps, toursApiBase, websiteId]);
7320
7482
  const toggleVoiceCapture = (0, import_react16.useCallback)(() => {
7321
7483
  if (isVoiceCaptureActive || shouldKeepVoiceCaptureRef.current) {
7322
7484
  shouldKeepVoiceCaptureRef.current = false;
7323
7485
  stopBackgroundVoiceCapture();
7486
+ persistSnapshot({ voiceCaptureEnabled: false });
7324
7487
  return;
7325
7488
  }
7326
7489
  shouldKeepVoiceCaptureRef.current = true;
7490
+ persistSnapshot({ voiceCaptureEnabled: true });
7327
7491
  startBackgroundVoiceCapture();
7328
- }, [isVoiceCaptureActive, startBackgroundVoiceCapture, stopBackgroundVoiceCapture]);
7492
+ }, [isVoiceCaptureActive, persistSnapshot, startBackgroundVoiceCapture, stopBackgroundVoiceCapture]);
7329
7493
  (0, import_react16.useEffect)(() => {
7330
7494
  if (!isRecording) return;
7331
7495
  if (phase === "active" && shouldKeepVoiceCaptureRef.current && !isVoiceCaptureActive) {
@@ -9300,7 +9464,9 @@ function ModelNexChatBubble({
9300
9464
  const [savedDraft, setSavedDraft] = (0, import_react18.useState)(null);
9301
9465
  const [reviewDraft, setReviewDraft] = (0, import_react18.useState)("");
9302
9466
  const [tourLiveTranscript, setTourLiveTranscript] = (0, import_react18.useState)("");
9303
- const [activeRecordingExperienceType, setActiveRecordingExperienceType] = (0, import_react18.useState)(recordingExperienceType);
9467
+ const [activeRecordingExperienceType, setActiveRecordingExperienceType] = (0, import_react18.useState)(
9468
+ () => readPersistedRecordingExperienceType() ?? recordingExperienceType
9469
+ );
9304
9470
  const recording = useRecordingMode({
9305
9471
  serverUrl,
9306
9472
  toursApiBase: ctx?.toursApiBase,
@@ -9310,6 +9476,16 @@ function ModelNexChatBubble({
9310
9476
  });
9311
9477
  const isGeneratingDraft = isRecordingDraftGenerating(recording.phase);
9312
9478
  const showRecordingOverlay = recordingMode && shouldShowRecordingOverlay(recording.phase);
9479
+ (0, import_react18.useEffect)(() => {
9480
+ const shouldBeRecording = recording.phase !== "idle";
9481
+ if (shouldBeRecording && !recordingMode) {
9482
+ setRecordingMode(true);
9483
+ return;
9484
+ }
9485
+ if (!shouldBeRecording && recordingMode && !showStopModal && !savedDraft) {
9486
+ setRecordingMode(false);
9487
+ }
9488
+ }, [recording.phase, recordingMode, savedDraft, setRecordingMode, showStopModal]);
9313
9489
  const playbackController = useExperiencePlaybackController({
9314
9490
  serverUrl,
9315
9491
  commandUrl: ctx?.commandUrl,
@@ -11815,7 +11991,7 @@ var ModelNexProvider = ({
11815
11991
  const [executedFields, setExecutedFields] = (0, import_react21.useState)(/* @__PURE__ */ new Set());
11816
11992
  const [highlightActions, setHighlightActions] = (0, import_react21.useState)(false);
11817
11993
  const [studioMode, setStudioMode] = (0, import_react21.useState)(false);
11818
- const [recordingMode, setRecordingMode] = (0, import_react21.useState)(false);
11994
+ const [recordingMode, setRecordingMode] = (0, import_react21.useState)(() => hasPersistedRecordingSession());
11819
11995
  const [voiceMuted, setVoiceMuted] = (0, import_react21.useState)(false);
11820
11996
  const [socketId, setSocketId] = (0, import_react21.useState)(null);
11821
11997
  const [actions, setActions] = (0, import_react21.useState)(/* @__PURE__ */ new Map());
package/dist/index.mjs CHANGED
@@ -2623,6 +2623,92 @@ async function validateInjectedDevModeKey(serverUrl, websiteId, devModeKey) {
2623
2623
  return validationPromise;
2624
2624
  }
2625
2625
 
2626
+ // src/utils/recordingPersistence.ts
2627
+ var RECORDING_SESSION_STORAGE_KEY = "modelnex-recording-session";
2628
+ var RESTORABLE_RECORDING_PHASES = /* @__PURE__ */ new Set([
2629
+ "active",
2630
+ "selecting",
2631
+ "configuring",
2632
+ "narrating",
2633
+ "reviewing",
2634
+ "saved",
2635
+ "finishing"
2636
+ ]);
2637
+ var TOUR_STEP_TYPES = /* @__PURE__ */ new Set([
2638
+ "narrate",
2639
+ "act",
2640
+ "input",
2641
+ "ask_and_fill",
2642
+ "user_input",
2643
+ "ask_or_fill",
2644
+ "wait_then_act"
2645
+ ]);
2646
+ function canUseSessionStorage() {
2647
+ return typeof window !== "undefined" && typeof window.sessionStorage !== "undefined";
2648
+ }
2649
+ function isExperienceType(value) {
2650
+ return value === "tour" || value === "onboarding";
2651
+ }
2652
+ function isTourStepType(value) {
2653
+ return typeof value === "string" && TOUR_STEP_TYPES.has(value);
2654
+ }
2655
+ function isPlainObject(value) {
2656
+ return Boolean(value) && typeof value === "object" && !Array.isArray(value);
2657
+ }
2658
+ function sanitizePersistedRecordingSession(raw) {
2659
+ if (!isPlainObject(raw)) return null;
2660
+ if (raw.version !== 1) return null;
2661
+ if (!isExperienceType(raw.experienceType)) return null;
2662
+ if (typeof raw.phase !== "string" || !RESTORABLE_RECORDING_PHASES.has(raw.phase)) return null;
2663
+ if (!Array.isArray(raw.steps) || !Array.isArray(raw.captureEvents) || !Array.isArray(raw.pendingClicks)) return null;
2664
+ if (!isTourStepType(raw.selectedStepType)) return null;
2665
+ return {
2666
+ version: 1,
2667
+ experienceType: raw.experienceType,
2668
+ phase: raw.phase,
2669
+ steps: raw.steps,
2670
+ captureEvents: raw.captureEvents,
2671
+ capturedTranscript: typeof raw.capturedTranscript === "string" ? raw.capturedTranscript : "",
2672
+ pendingClicks: raw.pendingClicks,
2673
+ selectedStepType: raw.selectedStepType,
2674
+ voiceCaptureEnabled: raw.voiceCaptureEnabled === true
2675
+ };
2676
+ }
2677
+ function readPersistedRecordingSession() {
2678
+ if (!canUseSessionStorage()) return null;
2679
+ try {
2680
+ const raw = window.sessionStorage.getItem(RECORDING_SESSION_STORAGE_KEY);
2681
+ if (!raw) return null;
2682
+ const parsed = sanitizePersistedRecordingSession(JSON.parse(raw));
2683
+ if (parsed) {
2684
+ return parsed;
2685
+ }
2686
+ } catch {
2687
+ }
2688
+ clearPersistedRecordingSession();
2689
+ return null;
2690
+ }
2691
+ function readPersistedRecordingExperienceType() {
2692
+ return readPersistedRecordingSession()?.experienceType ?? null;
2693
+ }
2694
+ function hasPersistedRecordingSession() {
2695
+ return readPersistedRecordingSession() !== null;
2696
+ }
2697
+ function persistRecordingSession(session) {
2698
+ if (!canUseSessionStorage()) return;
2699
+ try {
2700
+ window.sessionStorage.setItem(RECORDING_SESSION_STORAGE_KEY, JSON.stringify(session));
2701
+ } catch {
2702
+ }
2703
+ }
2704
+ function clearPersistedRecordingSession() {
2705
+ if (!canUseSessionStorage()) return;
2706
+ try {
2707
+ window.sessionStorage.removeItem(RECORDING_SESSION_STORAGE_KEY);
2708
+ } catch {
2709
+ }
2710
+ }
2711
+
2626
2712
  // src/hooks/useRunCommand.ts
2627
2713
  import { useCallback as useCallback5, useContext as useContext2 } from "react";
2628
2714
  function searchTaggedElementsForQuery(store, query, limit = 8) {
@@ -6558,26 +6644,36 @@ function useRecordingMode({
6558
6644
  onPreview,
6559
6645
  experienceType = "tour"
6560
6646
  }) {
6561
- const [phase, setPhase] = useState11("idle");
6562
- const [steps, setSteps] = useState11([]);
6647
+ const restoredSessionRef = useRef12(void 0);
6648
+ if (restoredSessionRef.current === void 0) {
6649
+ restoredSessionRef.current = readPersistedRecordingSession();
6650
+ }
6651
+ const restoredSession = restoredSessionRef.current;
6652
+ const restoredPhase = restoredSession ? "active" : "idle";
6653
+ const [phase, setPhase] = useState11(restoredPhase);
6654
+ const [steps, setSteps] = useState11(() => restoredSession?.steps ?? []);
6563
6655
  const [selectedElement, setSelectedElement] = useState11(null);
6564
- const [selectedStepType, setSelectedStepType] = useState11("ask_or_fill");
6656
+ const [selectedStepType, setSelectedStepType] = useState11(() => restoredSession?.selectedStepType ?? "ask_or_fill");
6565
6657
  const [pendingNarration, setPendingNarration] = useState11("");
6566
6658
  const [polishedNarration, setPolishedNarration] = useState11("");
6567
- const [captureEvents, setCaptureEvents] = useState11([]);
6568
- const [capturedTranscript, setCapturedTranscript] = useState11("");
6659
+ const [captureEvents, setCaptureEvents] = useState11(() => restoredSession?.captureEvents ?? []);
6660
+ const [capturedTranscript, setCapturedTranscript] = useState11(() => restoredSession?.capturedTranscript ?? "");
6569
6661
  const [isVoiceCaptureActive, setIsVoiceCaptureActive] = useState11(false);
6570
6662
  const stepsRef = useRef12([]);
6571
6663
  stepsRef.current = steps;
6572
6664
  const captureEventsRef = useRef12([]);
6665
+ const capturedTranscriptRef = useRef12(capturedTranscript);
6666
+ capturedTranscriptRef.current = capturedTranscript;
6667
+ const phaseRef = useRef12(phase);
6668
+ phaseRef.current = phase;
6573
6669
  const safeSpeak = useCallback10((text) => {
6574
6670
  void voice.speak(text).catch((err) => {
6575
6671
  console.warn("[Recording] Voice playback unavailable:", err);
6576
6672
  });
6577
6673
  }, [voice]);
6578
6674
  captureEventsRef.current = captureEvents;
6579
- const pendingClicksRef = useRef12([]);
6580
- const shouldKeepVoiceCaptureRef = useRef12(false);
6675
+ const pendingClicksRef = useRef12(restoredSession?.pendingClicks ?? []);
6676
+ const shouldKeepVoiceCaptureRef = useRef12(restoredSession?.voiceCaptureEnabled === true);
6581
6677
  const resumeVoiceAfterNarrationRef = useRef12(false);
6582
6678
  const lastAutoNoteRef = useRef12("");
6583
6679
  const lastHoverKeyRef = useRef12("");
@@ -6587,19 +6683,43 @@ function useRecordingMode({
6587
6683
  const isRecording = phase !== "idle";
6588
6684
  const stepCount = steps.length;
6589
6685
  const captureEventCount = captureEvents.length;
6590
- const appendCaptureEvent = useCallback10((event) => {
6591
- setCaptureEvents((prev) => {
6592
- const nextEvent = {
6593
- id: event.id || newCaptureId(event.type),
6594
- order: prev.length,
6595
- timestamp: event.timestamp || (/* @__PURE__ */ new Date()).toISOString(),
6596
- ...event
6597
- };
6598
- return [...prev, nextEvent];
6686
+ const persistSnapshot = useCallback10((overrides) => {
6687
+ const nextPhase = overrides?.phase ?? phaseRef.current;
6688
+ const nextSteps = overrides?.steps ?? stepsRef.current;
6689
+ const nextCaptureEvents = overrides?.captureEvents ?? captureEventsRef.current;
6690
+ const nextTranscript = overrides?.capturedTranscript ?? capturedTranscriptRef.current;
6691
+ const nextSelectedStepType = overrides?.selectedStepType ?? selectedStepTypeRef.current;
6692
+ const nextVoiceCaptureEnabled = overrides?.voiceCaptureEnabled ?? shouldKeepVoiceCaptureRef.current;
6693
+ if (nextPhase === "idle" && nextSteps.length === 0 && nextCaptureEvents.length === 0 && !nextTranscript.trim()) {
6694
+ clearPersistedRecordingSession();
6695
+ return;
6696
+ }
6697
+ persistRecordingSession({
6698
+ version: 1,
6699
+ experienceType,
6700
+ phase: nextPhase,
6701
+ steps: nextSteps,
6702
+ captureEvents: nextCaptureEvents,
6703
+ capturedTranscript: nextTranscript,
6704
+ pendingClicks: pendingClicksRef.current,
6705
+ selectedStepType: nextSelectedStepType,
6706
+ voiceCaptureEnabled: nextVoiceCaptureEnabled
6599
6707
  });
6600
- }, []);
6708
+ }, [experienceType]);
6709
+ const appendCaptureEvent = useCallback10((event) => {
6710
+ const nextEvent = {
6711
+ id: event.id || newCaptureId(event.type),
6712
+ order: captureEventsRef.current.length,
6713
+ timestamp: event.timestamp || (/* @__PURE__ */ new Date()).toISOString(),
6714
+ ...event
6715
+ };
6716
+ const nextCaptureEvents = [...captureEventsRef.current, nextEvent];
6717
+ captureEventsRef.current = nextCaptureEvents;
6718
+ setCaptureEvents(nextCaptureEvents);
6719
+ persistSnapshot({ captureEvents: nextCaptureEvents });
6720
+ }, [persistSnapshot]);
6601
6721
  const updateCaptureEvent = useCallback10((id, metadataPatch) => {
6602
- setCaptureEvents((prev) => prev.map((event) => {
6722
+ const nextCaptureEvents = captureEventsRef.current.map((event) => {
6603
6723
  if (event.id === id) {
6604
6724
  return {
6605
6725
  ...event,
@@ -6610,14 +6730,19 @@ function useRecordingMode({
6610
6730
  };
6611
6731
  }
6612
6732
  return event;
6613
- }));
6614
- }, []);
6733
+ });
6734
+ captureEventsRef.current = nextCaptureEvents;
6735
+ setCaptureEvents(nextCaptureEvents);
6736
+ persistSnapshot({ captureEvents: nextCaptureEvents });
6737
+ }, [persistSnapshot]);
6615
6738
  const appendVoiceNote = useCallback10((transcript) => {
6616
6739
  const text = transcript.trim();
6617
6740
  if (!text) return;
6618
6741
  if (lastAutoNoteRef.current === text) return;
6619
6742
  lastAutoNoteRef.current = text;
6620
- setCapturedTranscript((prev) => prev ? `${prev} ${text}` : text);
6743
+ const nextTranscript = capturedTranscriptRef.current ? `${capturedTranscriptRef.current} ${text}` : text;
6744
+ capturedTranscriptRef.current = nextTranscript;
6745
+ setCapturedTranscript(nextTranscript);
6621
6746
  const recordedClicks = [...pendingClicksRef.current];
6622
6747
  const lastClick = recordedClicks[recordedClicks.length - 1];
6623
6748
  const newStep = {
@@ -6651,7 +6776,10 @@ function useRecordingMode({
6651
6776
  note: "voice_capture"
6652
6777
  }
6653
6778
  };
6654
- setSteps((prev) => [...prev, newStep]);
6779
+ const nextSteps = [...stepsRef.current, newStep];
6780
+ stepsRef.current = nextSteps;
6781
+ setSteps(nextSteps);
6782
+ persistSnapshot({ steps: nextSteps, capturedTranscript: nextTranscript });
6655
6783
  appendCaptureEvent({
6656
6784
  type: "note",
6657
6785
  url: getCurrentTourUrl(),
@@ -6667,7 +6795,7 @@ function useRecordingMode({
6667
6795
  stepOrder: newStep.order
6668
6796
  }
6669
6797
  });
6670
- }, [appendCaptureEvent]);
6798
+ }, [appendCaptureEvent, persistSnapshot]);
6671
6799
  const stopBackgroundVoiceCapture = useCallback10(() => {
6672
6800
  voice.stopListening();
6673
6801
  setIsVoiceCaptureActive(false);
@@ -6690,24 +6818,37 @@ function useRecordingMode({
6690
6818
  setIsVoiceCaptureActive(true);
6691
6819
  }, [appendVoiceNote, isRecording, phase, voice]);
6692
6820
  const startRecording = useCallback10(() => {
6693
- setPhase("active");
6694
- setSteps([]);
6695
- pendingClicksRef.current = [];
6696
- setCaptureEvents([{
6821
+ const nextPhase = "active";
6822
+ const nextCaptureEvents = [{
6697
6823
  id: newCaptureId("session_start"),
6698
6824
  order: 0,
6699
6825
  timestamp: (/* @__PURE__ */ new Date()).toISOString(),
6700
6826
  type: "session_start",
6701
6827
  url: getCurrentTourUrl(),
6702
6828
  metadata: { mode: "capture_v1" }
6703
- }]);
6829
+ }];
6830
+ phaseRef.current = nextPhase;
6831
+ setPhase(nextPhase);
6832
+ stepsRef.current = [];
6833
+ setSteps([]);
6834
+ pendingClicksRef.current = [];
6835
+ captureEventsRef.current = nextCaptureEvents;
6836
+ setCaptureEvents(nextCaptureEvents);
6837
+ capturedTranscriptRef.current = "";
6704
6838
  setCapturedTranscript("");
6705
6839
  setIsVoiceCaptureActive(false);
6706
6840
  shouldKeepVoiceCaptureRef.current = false;
6707
6841
  lastAutoNoteRef.current = "";
6708
6842
  lastHoverKeyRef.current = "";
6709
6843
  lastHoverAtRef.current = 0;
6710
- }, []);
6844
+ persistSnapshot({
6845
+ phase: nextPhase,
6846
+ steps: [],
6847
+ captureEvents: nextCaptureEvents,
6848
+ capturedTranscript: "",
6849
+ voiceCaptureEnabled: false
6850
+ });
6851
+ }, [persistSnapshot]);
6711
6852
  const markStep = useCallback10(() => {
6712
6853
  if (phase !== "active") return;
6713
6854
  setPhase("selecting");
@@ -6837,7 +6978,9 @@ function useRecordingMode({
6837
6978
  note: raw
6838
6979
  }
6839
6980
  };
6840
- setSteps((prev) => [...prev, newStep]);
6981
+ const nextSteps = [...stepsRef.current, newStep];
6982
+ stepsRef.current = nextSteps;
6983
+ setSteps(nextSteps);
6841
6984
  appendCaptureEvent({
6842
6985
  type: "note",
6843
6986
  url: getCurrentTourUrl(),
@@ -6856,14 +6999,16 @@ function useRecordingMode({
6856
6999
  setSelectedElement(null);
6857
7000
  setPendingNarration("");
6858
7001
  setPolishedNarration("");
7002
+ phaseRef.current = "active";
6859
7003
  setPhase("active");
7004
+ persistSnapshot({ steps: nextSteps, phase: "active" });
6860
7005
  safeSpeak(`Note ${newStep.order + 1} saved. Keep performing the workflow or add another note.`);
6861
7006
  if (resumeVoiceAfterNarrationRef.current || shouldKeepVoiceCaptureRef.current) {
6862
7007
  window.setTimeout(() => {
6863
7008
  if (shouldKeepVoiceCaptureRef.current) startBackgroundVoiceCapture();
6864
7009
  }, 0);
6865
7010
  }
6866
- }, [appendCaptureEvent, phase, polishedNarration, pendingNarration, selectedElement, selectedStepType, startBackgroundVoiceCapture, voice]);
7011
+ }, [appendCaptureEvent, phase, polishedNarration, pendingNarration, persistSnapshot, selectedElement, selectedStepType, startBackgroundVoiceCapture, voice]);
6867
7012
  const redoNarration = useCallback10(() => {
6868
7013
  if (phase !== "reviewing") return;
6869
7014
  setPendingNarration("");
@@ -6875,23 +7020,27 @@ function useRecordingMode({
6875
7020
  setPolishedNarration(text);
6876
7021
  }, []);
6877
7022
  const continueRecording = useCallback10(() => {
7023
+ phaseRef.current = "active";
6878
7024
  setPhase("active");
7025
+ persistSnapshot({ phase: "active" });
6879
7026
  if (shouldKeepVoiceCaptureRef.current) {
6880
7027
  window.setTimeout(() => {
6881
7028
  if (shouldKeepVoiceCaptureRef.current) startBackgroundVoiceCapture();
6882
7029
  }, 0);
6883
7030
  }
6884
- }, [startBackgroundVoiceCapture]);
7031
+ }, [persistSnapshot, startBackgroundVoiceCapture]);
6885
7032
  const undoLastStep = useCallback10(() => {
6886
- setSteps((prev) => {
6887
- const next = prev.slice(0, -1);
6888
- safeSpeak(
6889
- next.length > 0 ? `Step ${prev.length} removed. ${next.length} step${next.length !== 1 ? "s" : ""} remaining.` : "Step removed. No steps recorded yet."
6890
- );
6891
- return next;
6892
- });
7033
+ const previous = stepsRef.current;
7034
+ const nextSteps = previous.slice(0, -1);
7035
+ stepsRef.current = nextSteps;
7036
+ setSteps(nextSteps);
7037
+ safeSpeak(
7038
+ nextSteps.length > 0 ? `Step ${previous.length} removed. ${nextSteps.length} step${nextSteps.length !== 1 ? "s" : ""} remaining.` : "Step removed. No steps recorded yet."
7039
+ );
7040
+ phaseRef.current = "active";
6893
7041
  setPhase("active");
6894
- }, [voice]);
7042
+ persistSnapshot({ steps: nextSteps, phase: "active" });
7043
+ }, [persistSnapshot, voice]);
6895
7044
  const previewSteps = useCallback10(() => {
6896
7045
  if (steps.length === 0) {
6897
7046
  safeSpeak("No steps recorded yet.");
@@ -6904,18 +7053,24 @@ function useRecordingMode({
6904
7053
  shouldKeepVoiceCaptureRef.current = false;
6905
7054
  resumeVoiceAfterNarrationRef.current = false;
6906
7055
  stopBackgroundVoiceCapture();
6907
- }, [stopBackgroundVoiceCapture]);
7056
+ persistSnapshot({ voiceCaptureEnabled: false });
7057
+ }, [persistSnapshot, stopBackgroundVoiceCapture]);
6908
7058
  const cancelRecording = useCallback10(() => {
6909
7059
  shouldKeepVoiceCaptureRef.current = false;
6910
7060
  stopBackgroundVoiceCapture();
7061
+ phaseRef.current = "idle";
6911
7062
  setPhase("idle");
7063
+ stepsRef.current = [];
6912
7064
  setSteps([]);
7065
+ captureEventsRef.current = [];
6913
7066
  setCaptureEvents([]);
7067
+ capturedTranscriptRef.current = "";
6914
7068
  setCapturedTranscript("");
6915
7069
  pendingClicksRef.current = [];
6916
7070
  setSelectedElement(null);
6917
7071
  setPendingNarration("");
6918
7072
  setPolishedNarration("");
7073
+ clearPersistedRecordingSession();
6919
7074
  safeSpeak("Recording cancelled.");
6920
7075
  }, [safeSpeak, stopBackgroundVoiceCapture]);
6921
7076
  useEffect15(() => {
@@ -6959,22 +7114,20 @@ function useRecordingMode({
6959
7114
  }
6960
7115
  const eventId = newCaptureId("click");
6961
7116
  const timestamp = (/* @__PURE__ */ new Date()).toISOString();
7117
+ appendCaptureEvent({
7118
+ id: eventId,
7119
+ timestamp,
7120
+ type: "click",
7121
+ url: click.url,
7122
+ label: click.label,
7123
+ tagName: click.tagName,
7124
+ testId: click.testId,
7125
+ fingerprint: click.fingerprint,
7126
+ textContaining: click.textContaining,
7127
+ metadata: buildCaptureEventMetadata(interactiveTarget, "click")
7128
+ });
6962
7129
  void captureScreenshot().then((beforeScreenshot) => {
6963
- appendCaptureEvent({
6964
- id: eventId,
6965
- timestamp,
6966
- type: "click",
6967
- url: click.url,
6968
- label: click.label,
6969
- tagName: click.tagName,
6970
- testId: click.testId,
6971
- fingerprint: click.fingerprint,
6972
- textContaining: click.textContaining,
6973
- metadata: {
6974
- ...buildCaptureEventMetadata(interactiveTarget, "click"),
6975
- beforeScreenshot
6976
- }
6977
- });
7130
+ updateCaptureEvent(eventId, { beforeScreenshot });
6978
7131
  window.setTimeout(async () => {
6979
7132
  try {
6980
7133
  const afterScreenshot = await captureScreenshot();
@@ -6985,18 +7138,6 @@ function useRecordingMode({
6985
7138
  }, 1200);
6986
7139
  }).catch((err) => {
6987
7140
  console.warn("[Recording] Failed to capture before-click screenshot:", err);
6988
- appendCaptureEvent({
6989
- id: eventId,
6990
- timestamp,
6991
- type: "click",
6992
- url: click.url,
6993
- label: click.label,
6994
- tagName: click.tagName,
6995
- testId: click.testId,
6996
- fingerprint: click.fingerprint,
6997
- textContaining: click.textContaining,
6998
- metadata: buildCaptureEventMetadata(interactiveTarget, "click")
6999
- });
7000
7141
  });
7001
7142
  };
7002
7143
  const handleInputEvent = (event) => {
@@ -7042,10 +7183,24 @@ function useRecordingMode({
7042
7183
  window.clearInterval(routePoll);
7043
7184
  };
7044
7185
  }, [appendCaptureEvent, isRecording]);
7186
+ useEffect15(() => {
7187
+ if (!isRecording || typeof window === "undefined") return;
7188
+ const flushRecordingSession = () => {
7189
+ persistSnapshot();
7190
+ };
7191
+ window.addEventListener("pagehide", flushRecordingSession);
7192
+ window.addEventListener("beforeunload", flushRecordingSession);
7193
+ return () => {
7194
+ window.removeEventListener("pagehide", flushRecordingSession);
7195
+ window.removeEventListener("beforeunload", flushRecordingSession);
7196
+ };
7197
+ }, [isRecording, persistSnapshot]);
7045
7198
  const stopRecording = useCallback10(async (tourName, targetUserTypes) => {
7199
+ phaseRef.current = "finishing";
7046
7200
  setPhase("finishing");
7047
7201
  shouldKeepVoiceCaptureRef.current = false;
7048
7202
  stopBackgroundVoiceCapture();
7203
+ persistSnapshot({ phase: "finishing", voiceCaptureEnabled: false });
7049
7204
  if (steps.length === 0 && captureEventsRef.current.length <= 1) {
7050
7205
  safeSpeak("No workflow captured yet.");
7051
7206
  setPhase("idle");
@@ -7091,11 +7246,16 @@ function useRecordingMode({
7091
7246
  throw new Error(errorMessage);
7092
7247
  }
7093
7248
  const data = await res.json();
7249
+ stepsRef.current = [];
7094
7250
  setSteps([]);
7251
+ captureEventsRef.current = [];
7095
7252
  setCaptureEvents([]);
7253
+ capturedTranscriptRef.current = "";
7096
7254
  setCapturedTranscript("");
7097
7255
  pendingClicksRef.current = [];
7256
+ phaseRef.current = "idle";
7098
7257
  setPhase("idle");
7258
+ clearPersistedRecordingSession();
7099
7259
  const savedStepCount = Array.isArray(data?.tour?.steps) ? data.tour.steps.length : 0;
7100
7260
  const itemLabel = experienceType === "onboarding" ? "workflow" : "tour";
7101
7261
  safeSpeak(`${itemLabel === "tour" ? "Tour" : "Workflow"} "${tourName}" saved as a draft with ${savedStepCount} generated steps. Review and preview it before publishing.`);
@@ -7103,19 +7263,23 @@ function useRecordingMode({
7103
7263
  } catch (err) {
7104
7264
  console.error("[Recording] Failed to save tour:", err);
7105
7265
  safeSpeak(`Failed to save the ${experienceType === "onboarding" ? "workflow" : "tour"}. Please try again.`);
7266
+ phaseRef.current = "active";
7106
7267
  setPhase("active");
7268
+ persistSnapshot({ phase: "active" });
7107
7269
  return null;
7108
7270
  }
7109
- }, [capturedTranscript, experienceType, safeSpeak, serverUrl, stopBackgroundVoiceCapture, steps, toursApiBase, websiteId]);
7271
+ }, [capturedTranscript, experienceType, persistSnapshot, safeSpeak, serverUrl, stopBackgroundVoiceCapture, steps, toursApiBase, websiteId]);
7110
7272
  const toggleVoiceCapture = useCallback10(() => {
7111
7273
  if (isVoiceCaptureActive || shouldKeepVoiceCaptureRef.current) {
7112
7274
  shouldKeepVoiceCaptureRef.current = false;
7113
7275
  stopBackgroundVoiceCapture();
7276
+ persistSnapshot({ voiceCaptureEnabled: false });
7114
7277
  return;
7115
7278
  }
7116
7279
  shouldKeepVoiceCaptureRef.current = true;
7280
+ persistSnapshot({ voiceCaptureEnabled: true });
7117
7281
  startBackgroundVoiceCapture();
7118
- }, [isVoiceCaptureActive, startBackgroundVoiceCapture, stopBackgroundVoiceCapture]);
7282
+ }, [isVoiceCaptureActive, persistSnapshot, startBackgroundVoiceCapture, stopBackgroundVoiceCapture]);
7119
7283
  useEffect15(() => {
7120
7284
  if (!isRecording) return;
7121
7285
  if (phase === "active" && shouldKeepVoiceCaptureRef.current && !isVoiceCaptureActive) {
@@ -9090,7 +9254,9 @@ function ModelNexChatBubble({
9090
9254
  const [savedDraft, setSavedDraft] = useState13(null);
9091
9255
  const [reviewDraft, setReviewDraft] = useState13("");
9092
9256
  const [tourLiveTranscript, setTourLiveTranscript] = useState13("");
9093
- const [activeRecordingExperienceType, setActiveRecordingExperienceType] = useState13(recordingExperienceType);
9257
+ const [activeRecordingExperienceType, setActiveRecordingExperienceType] = useState13(
9258
+ () => readPersistedRecordingExperienceType() ?? recordingExperienceType
9259
+ );
9094
9260
  const recording = useRecordingMode({
9095
9261
  serverUrl,
9096
9262
  toursApiBase: ctx?.toursApiBase,
@@ -9100,6 +9266,16 @@ function ModelNexChatBubble({
9100
9266
  });
9101
9267
  const isGeneratingDraft = isRecordingDraftGenerating(recording.phase);
9102
9268
  const showRecordingOverlay = recordingMode && shouldShowRecordingOverlay(recording.phase);
9269
+ useEffect17(() => {
9270
+ const shouldBeRecording = recording.phase !== "idle";
9271
+ if (shouldBeRecording && !recordingMode) {
9272
+ setRecordingMode(true);
9273
+ return;
9274
+ }
9275
+ if (!shouldBeRecording && recordingMode && !showStopModal && !savedDraft) {
9276
+ setRecordingMode(false);
9277
+ }
9278
+ }, [recording.phase, recordingMode, savedDraft, setRecordingMode, showStopModal]);
9103
9279
  const playbackController = useExperiencePlaybackController({
9104
9280
  serverUrl,
9105
9281
  commandUrl: ctx?.commandUrl,
@@ -11605,7 +11781,7 @@ var ModelNexProvider = ({
11605
11781
  const [executedFields, setExecutedFields] = useState15(/* @__PURE__ */ new Set());
11606
11782
  const [highlightActions, setHighlightActions] = useState15(false);
11607
11783
  const [studioMode, setStudioMode] = useState15(false);
11608
- const [recordingMode, setRecordingMode] = useState15(false);
11784
+ const [recordingMode, setRecordingMode] = useState15(() => hasPersistedRecordingSession());
11609
11785
  const [voiceMuted, setVoiceMuted] = useState15(false);
11610
11786
  const [socketId, setSocketId] = useState15(null);
11611
11787
  const [actions, setActions] = useState15(/* @__PURE__ */ new Map());
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@modelnex/sdk",
3
- "version": "0.5.32",
3
+ "version": "0.5.34",
4
4
  "description": "React SDK for natural language control of web apps via AI agents",
5
5
  "main": "./dist/index.js",
6
6
  "module": "./dist/index.mjs",