markupr 2.1.8 → 2.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +292 -15
- package/dist/cli/index.mjs +3593 -0
- package/dist/main/index.mjs +743 -220
- package/dist/mcp/index.mjs +4053 -0
- package/package.json +32 -7
- package/.claude/commands/review-feedback.md +0 -47
- package/.eslintrc.json +0 -35
- package/.github/CODEOWNERS +0 -16
- package/.github/FUNDING.yml +0 -1
- package/.github/ISSUE_TEMPLATE/bug_report.md +0 -56
- package/.github/ISSUE_TEMPLATE/feature_request.md +0 -54
- package/.github/PULL_REQUEST_TEMPLATE.md +0 -89
- package/.github/dependabot.yml +0 -70
- package/.github/workflows/ci.yml +0 -184
- package/.github/workflows/deploy-landing.yml +0 -134
- package/.github/workflows/nightly.yml +0 -288
- package/.github/workflows/release.yml +0 -318
- package/CHANGELOG.md +0 -127
- package/CLAUDE.md +0 -137
- package/CODE_OF_CONDUCT.md +0 -9
- package/CONTRIBUTING.md +0 -390
- package/PRODUCT_VISION.md +0 -277
- package/SECURITY.md +0 -51
- package/SIGNING_INSTRUCTIONS.md +0 -284
- package/assets/DMG_BACKGROUND_INSTRUCTIONS.md +0 -130
- package/assets/svg-source/dmg-background.svg +0 -70
- package/assets/svg-source/icon.svg +0 -20
- package/assets/svg-source/tray-icon-processing.svg +0 -7
- package/assets/svg-source/tray-icon-recording.svg +0 -7
- package/assets/svg-source/tray-icon.svg +0 -6
- package/assets/tray-complete.png +0 -0
- package/assets/tray-complete@2x.png +0 -0
- package/assets/tray-completeTemplate.png +0 -0
- package/assets/tray-completeTemplate@2x.png +0 -0
- package/assets/tray-error.png +0 -0
- package/assets/tray-error@2x.png +0 -0
- package/assets/tray-errorTemplate.png +0 -0
- package/assets/tray-errorTemplate@2x.png +0 -0
- package/assets/tray-icon-processing.png +0 -0
- package/assets/tray-icon-processing@2x.png +0 -0
- package/assets/tray-icon-processingTemplate.png +0 -0
- package/assets/tray-icon-processingTemplate@2x.png +0 -0
- package/assets/tray-icon-recording.png +0 -0
- package/assets/tray-icon-recording@2x.png +0 -0
- package/assets/tray-icon-recordingTemplate.png +0 -0
- package/assets/tray-icon-recordingTemplate@2x.png +0 -0
- package/assets/tray-icon.png +0 -0
- package/assets/tray-icon@2x.png +0 -0
- package/assets/tray-iconTemplate.png +0 -0
- package/assets/tray-iconTemplate@2x.png +0 -0
- package/assets/tray-idle.png +0 -0
- package/assets/tray-idle@2x.png +0 -0
- package/assets/tray-idleTemplate.png +0 -0
- package/assets/tray-idleTemplate@2x.png +0 -0
- package/assets/tray-processing-0.png +0 -0
- package/assets/tray-processing-0@2x.png +0 -0
- package/assets/tray-processing-0Template.png +0 -0
- package/assets/tray-processing-0Template@2x.png +0 -0
- package/assets/tray-processing-1.png +0 -0
- package/assets/tray-processing-1@2x.png +0 -0
- package/assets/tray-processing-1Template.png +0 -0
- package/assets/tray-processing-1Template@2x.png +0 -0
- package/assets/tray-processing-2.png +0 -0
- package/assets/tray-processing-2@2x.png +0 -0
- package/assets/tray-processing-2Template.png +0 -0
- package/assets/tray-processing-2Template@2x.png +0 -0
- package/assets/tray-processing-3.png +0 -0
- package/assets/tray-processing-3@2x.png +0 -0
- package/assets/tray-processing-3Template.png +0 -0
- package/assets/tray-processing-3Template@2x.png +0 -0
- package/assets/tray-processing.png +0 -0
- package/assets/tray-processing@2x.png +0 -0
- package/assets/tray-processingTemplate.png +0 -0
- package/assets/tray-processingTemplate@2x.png +0 -0
- package/assets/tray-recording.png +0 -0
- package/assets/tray-recording@2x.png +0 -0
- package/assets/tray-recordingTemplate.png +0 -0
- package/assets/tray-recordingTemplate@2x.png +0 -0
- package/build/DMG_BACKGROUND_SPEC.md +0 -50
- package/build/dmg-background.png +0 -0
- package/build/dmg-background@2x.png +0 -0
- package/build/entitlements.mac.inherit.plist +0 -27
- package/build/entitlements.mac.plist +0 -41
- package/build/favicon-16.png +0 -0
- package/build/favicon-180.png +0 -0
- package/build/favicon-192.png +0 -0
- package/build/favicon-32.png +0 -0
- package/build/favicon-48.png +0 -0
- package/build/favicon-512.png +0 -0
- package/build/favicon-64.png +0 -0
- package/build/icon-128.png +0 -0
- package/build/icon-16.png +0 -0
- package/build/icon-24.png +0 -0
- package/build/icon-256.png +0 -0
- package/build/icon-32.png +0 -0
- package/build/icon-48.png +0 -0
- package/build/icon-64.png +0 -0
- package/build/icon.icns +0 -0
- package/build/icon.ico +0 -0
- package/build/icon.iconset/icon_128x128.png +0 -0
- package/build/icon.iconset/icon_128x128@2x.png +0 -0
- package/build/icon.iconset/icon_16x16.png +0 -0
- package/build/icon.iconset/icon_16x16@2x.png +0 -0
- package/build/icon.iconset/icon_256x256.png +0 -0
- package/build/icon.iconset/icon_256x256@2x.png +0 -0
- package/build/icon.iconset/icon_32x32.png +0 -0
- package/build/icon.iconset/icon_32x32@2x.png +0 -0
- package/build/icon.iconset/icon_512x512.png +0 -0
- package/build/icon.iconset/icon_512x512@2x.png +0 -0
- package/build/icon.png +0 -0
- package/build/installer-header.bmp +0 -0
- package/build/installer-header.png +0 -0
- package/build/installer-sidebar.bmp +0 -0
- package/build/installer-sidebar.png +0 -0
- package/build/installer.nsh +0 -45
- package/build/overlay-processing.png +0 -0
- package/build/overlay-recording.png +0 -0
- package/build/toolbar-record.png +0 -0
- package/build/toolbar-screenshot.png +0 -0
- package/build/toolbar-settings.png +0 -0
- package/build/toolbar-stop.png +0 -0
- package/dist/preload/index.mjs +0 -907
- package/dist/renderer/assets/index-CCmUjl9K.js +0 -19495
- package/dist/renderer/assets/index-CUqz_Gs6.css +0 -2270
- package/dist/renderer/index.html +0 -27
- package/docs/AI_AGENT_QUICKSTART.md +0 -42
- package/docs/AI_PIPELINE_DESIGN.md +0 -595
- package/docs/API.md +0 -514
- package/docs/ARCHITECTURE.md +0 -460
- package/docs/CONFIGURATION.md +0 -336
- package/docs/DEVELOPMENT.md +0 -508
- package/docs/EXPORT_FORMATS.md +0 -451
- package/docs/GETTING_STARTED.md +0 -236
- package/docs/KEYBOARD_SHORTCUTS.md +0 -334
- package/docs/TROUBLESHOOTING.md +0 -418
- package/docs/landing/index.html +0 -672
- package/docs/landing/script.js +0 -342
- package/docs/landing/styles.css +0 -1543
- package/electron-builder.yml +0 -140
- package/electron.vite.config.ts +0 -63
- package/railway.json +0 -12
- package/scripts/build.mjs +0 -51
- package/scripts/generate-icons.mjs +0 -314
- package/scripts/generate-installer-images.cjs +0 -253
- package/scripts/generate-tray-icons.mjs +0 -258
- package/scripts/notarize.cjs +0 -180
- package/scripts/one-click-clean-test.sh +0 -147
- package/scripts/postinstall.mjs +0 -36
- package/scripts/setup-markupr.sh +0 -55
- package/setup +0 -17
- package/site/index.html +0 -1835
- package/site/package.json +0 -11
- package/site/railway.json +0 -12
- package/site/server.js +0 -31
- package/src/main/AutoUpdater.ts +0 -392
- package/src/main/CrashRecovery.ts +0 -655
- package/src/main/ErrorHandler.ts +0 -703
- package/src/main/HotkeyManager.ts +0 -399
- package/src/main/MenuManager.ts +0 -529
- package/src/main/PermissionManager.ts +0 -420
- package/src/main/SessionController.ts +0 -1465
- package/src/main/TrayManager.ts +0 -540
- package/src/main/ai/AIPipelineManager.ts +0 -199
- package/src/main/ai/ClaudeAnalyzer.ts +0 -339
- package/src/main/ai/ImageOptimizer.ts +0 -176
- package/src/main/ai/StructuredMarkdownBuilder.ts +0 -379
- package/src/main/ai/index.ts +0 -16
- package/src/main/ai/types.ts +0 -258
- package/src/main/analysis/ClarificationGenerator.ts +0 -385
- package/src/main/analysis/FeedbackAnalyzer.ts +0 -531
- package/src/main/analysis/index.ts +0 -19
- package/src/main/audio/AudioCapture.ts +0 -978
- package/src/main/audio/audioUtils.ts +0 -100
- package/src/main/audio/index.ts +0 -20
- package/src/main/capture/index.ts +0 -1
- package/src/main/index.ts +0 -1693
- package/src/main/ipc/captureHandlers.ts +0 -272
- package/src/main/ipc/index.ts +0 -45
- package/src/main/ipc/outputHandlers.ts +0 -302
- package/src/main/ipc/sessionHandlers.ts +0 -56
- package/src/main/ipc/settingsHandlers.ts +0 -471
- package/src/main/ipc/types.ts +0 -56
- package/src/main/ipc/windowHandlers.ts +0 -277
- package/src/main/output/ClipboardService.ts +0 -369
- package/src/main/output/ExportService.ts +0 -539
- package/src/main/output/FileManager.ts +0 -416
- package/src/main/output/MarkdownGenerator.ts +0 -791
- package/src/main/output/MarkdownPatcher.ts +0 -299
- package/src/main/output/index.ts +0 -186
- package/src/main/output/sessionAdapter.ts +0 -207
- package/src/main/output/templates/html-template.ts +0 -553
- package/src/main/pipeline/FrameExtractor.ts +0 -330
- package/src/main/pipeline/PostProcessor.ts +0 -399
- package/src/main/pipeline/TranscriptAnalyzer.ts +0 -226
- package/src/main/pipeline/index.ts +0 -36
- package/src/main/platform/WindowsTaskbar.ts +0 -600
- package/src/main/platform/index.ts +0 -16
- package/src/main/settings/SettingsManager.ts +0 -730
- package/src/main/settings/index.ts +0 -19
- package/src/main/transcription/ModelDownloadManager.ts +0 -494
- package/src/main/transcription/TierManager.ts +0 -219
- package/src/main/transcription/TranscriptionRecoveryService.ts +0 -340
- package/src/main/transcription/WhisperService.ts +0 -748
- package/src/main/transcription/index.ts +0 -56
- package/src/main/transcription/types.ts +0 -135
- package/src/main/windows/PopoverManager.ts +0 -284
- package/src/main/windows/TaskbarIntegration.ts +0 -452
- package/src/main/windows/index.ts +0 -23
- package/src/preload/index.ts +0 -1047
- package/src/renderer/App.tsx +0 -515
- package/src/renderer/AppWrapper.tsx +0 -28
- package/src/renderer/assets/logo-dark.svg +0 -7
- package/src/renderer/assets/logo.svg +0 -7
- package/src/renderer/audio/AudioCaptureRenderer.ts +0 -454
- package/src/renderer/capture/ScreenRecordingRenderer.ts +0 -492
- package/src/renderer/components/AnnotationOverlay.tsx +0 -836
- package/src/renderer/components/AudioWaveform.tsx +0 -811
- package/src/renderer/components/ClarificationQuestions.tsx +0 -656
- package/src/renderer/components/CountdownTimer.tsx +0 -495
- package/src/renderer/components/CrashRecoveryDialog.tsx +0 -632
- package/src/renderer/components/DonateButton.tsx +0 -127
- package/src/renderer/components/ErrorBoundary.tsx +0 -308
- package/src/renderer/components/ExportDialog.tsx +0 -872
- package/src/renderer/components/HotkeyHint.tsx +0 -261
- package/src/renderer/components/KeyboardShortcuts.tsx +0 -787
- package/src/renderer/components/ModelDownloadDialog.tsx +0 -844
- package/src/renderer/components/Onboarding.tsx +0 -1830
- package/src/renderer/components/ProcessingOverlay.tsx +0 -157
- package/src/renderer/components/RecordingOverlay.tsx +0 -423
- package/src/renderer/components/SessionHistory.tsx +0 -1746
- package/src/renderer/components/SessionReview.tsx +0 -1321
- package/src/renderer/components/SettingsPanel.tsx +0 -217
- package/src/renderer/components/Skeleton.tsx +0 -347
- package/src/renderer/components/StatusIndicator.tsx +0 -86
- package/src/renderer/components/ThemeProvider.tsx +0 -429
- package/src/renderer/components/Tooltip.tsx +0 -370
- package/src/renderer/components/TranscriptionPreview.tsx +0 -183
- package/src/renderer/components/TranscriptionTierSelector.tsx +0 -640
- package/src/renderer/components/UpdateNotification.tsx +0 -377
- package/src/renderer/components/WindowSelector.tsx +0 -947
- package/src/renderer/components/index.ts +0 -99
- package/src/renderer/components/primitives/ApiKeyInput.tsx +0 -98
- package/src/renderer/components/primitives/ColorPicker.tsx +0 -65
- package/src/renderer/components/primitives/DangerButton.tsx +0 -45
- package/src/renderer/components/primitives/DirectoryPicker.tsx +0 -41
- package/src/renderer/components/primitives/Dropdown.tsx +0 -34
- package/src/renderer/components/primitives/KeyRecorder.tsx +0 -117
- package/src/renderer/components/primitives/SettingsSection.tsx +0 -32
- package/src/renderer/components/primitives/Slider.tsx +0 -43
- package/src/renderer/components/primitives/Toggle.tsx +0 -36
- package/src/renderer/components/primitives/index.ts +0 -10
- package/src/renderer/components/settings/AdvancedTab.tsx +0 -174
- package/src/renderer/components/settings/AppearanceTab.tsx +0 -77
- package/src/renderer/components/settings/GeneralTab.tsx +0 -40
- package/src/renderer/components/settings/HotkeysTab.tsx +0 -79
- package/src/renderer/components/settings/RecordingTab.tsx +0 -84
- package/src/renderer/components/settings/index.ts +0 -9
- package/src/renderer/components/settings/settingsStyles.ts +0 -673
- package/src/renderer/components/settings/tabConfig.tsx +0 -85
- package/src/renderer/components/settings/useSettingsPanel.ts +0 -447
- package/src/renderer/contexts/ProcessingContext.tsx +0 -227
- package/src/renderer/contexts/RecordingContext.tsx +0 -683
- package/src/renderer/contexts/UIContext.tsx +0 -326
- package/src/renderer/contexts/index.ts +0 -24
- package/src/renderer/donateMessages.ts +0 -69
- package/src/renderer/hooks/index.ts +0 -75
- package/src/renderer/hooks/useAnimation.tsx +0 -544
- package/src/renderer/hooks/useTheme.ts +0 -313
- package/src/renderer/index.html +0 -26
- package/src/renderer/main.tsx +0 -52
- package/src/renderer/styles/animations.css +0 -1093
- package/src/renderer/styles/app-shell.css +0 -662
- package/src/renderer/styles/globals.css +0 -515
- package/src/renderer/styles/theme.ts +0 -578
- package/src/renderer/types/electron.d.ts +0 -385
- package/src/shared/hotkeys.ts +0 -283
- package/src/shared/types.ts +0 -809
- package/tests/clipboard.test.ts +0 -228
- package/tests/e2e/criticalPaths.test.ts +0 -594
- package/tests/feedbackAnalyzer.test.ts +0 -303
- package/tests/integration/sessionFlow.test.ts +0 -583
- package/tests/markdownGenerator.test.ts +0 -418
- package/tests/output.test.ts +0 -96
- package/tests/setup.ts +0 -486
- package/tests/unit/appIntegration.test.ts +0 -676
- package/tests/unit/appViewState.test.ts +0 -281
- package/tests/unit/audioIpcChannels.test.ts +0 -17
- package/tests/unit/exportService.test.ts +0 -492
- package/tests/unit/hotkeys.test.ts +0 -92
- package/tests/unit/navigationPreload.test.ts +0 -94
- package/tests/unit/onboardingFlow.test.ts +0 -345
- package/tests/unit/permissionManager.test.ts +0 -175
- package/tests/unit/permissionManagerExpanded.test.ts +0 -296
- package/tests/unit/screenRecordingRenderer.test.ts +0 -368
- package/tests/unit/sessionController.test.ts +0 -515
- package/tests/unit/tierManager.test.ts +0 -61
- package/tests/unit/tierManagerExpanded.test.ts +0 -142
- package/tests/unit/transcriptAnalyzer.test.ts +0 -64
- package/tsconfig.json +0 -25
- package/vitest.config.ts +0 -46
|
@@ -0,0 +1,4053 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
|
|
3
|
+
get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
|
|
4
|
+
}) : x)(function(x) {
|
|
5
|
+
if (typeof require !== "undefined") return require.apply(this, arguments);
|
|
6
|
+
throw Error('Dynamic require of "' + x + '" is not supported');
|
|
7
|
+
});
|
|
8
|
+
|
|
9
|
+
// src/mcp/index.ts
|
|
10
|
+
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
|
11
|
+
|
|
12
|
+
// src/mcp/server.ts
|
|
13
|
+
import { McpServer as McpServer2 } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
14
|
+
|
|
15
|
+
// src/mcp/tools/captureScreenshot.ts
|
|
16
|
+
import { z } from "zod";
|
|
17
|
+
import { join as join2 } from "path";
|
|
18
|
+
import { readdir as readdir2 } from "fs/promises";
|
|
19
|
+
|
|
20
|
+
// src/mcp/capture/ScreenCapture.ts
|
|
21
|
+
import { execFile as execFileCb } from "child_process";
|
|
22
|
+
import { stat } from "fs/promises";
|
|
23
|
+
import { resolve } from "path";
|
|
24
|
+
|
|
25
|
+
// src/mcp/utils/Logger.ts
|
|
26
|
+
// Write a namespaced diagnostic line to stderr. Logging must stay off stdout,
// which is reserved for the MCP stdio transport (see StdioServerTransport import).
function log(message) {
  const line = `[markupr-mcp] ${message}\n`;
  process.stderr.write(line);
}
|
|
30
|
+
|
|
31
|
+
// src/mcp/capture/ScreenCapture.ts
|
|
32
|
+
// Minimal environment passed to spawned child processes: only the variables
// a CLI tool plausibly needs, instead of leaking the full parent environment.
// HOME and TMPDIR fall back to their Windows counterparts when unset/empty.
var SAFE_CHILD_ENV = (({ PATH, HOME, USERPROFILE, LANG, TMPDIR, TEMP }) => ({
  PATH,
  HOME: HOME || USERPROFILE,
  USERPROFILE,
  LANG,
  TMPDIR: TMPDIR || TEMP,
  TEMP
}))(process.env);
|
|
40
|
+
// Capture a still screenshot via the macOS `screencapture` CLI.
// options: { display?: number (1-indexed, defaults to 1), outputPath: string }
// Returns the resolved absolute path of the written file.
// Throws when screencapture reports an error or when the output file is
// missing/empty — in practice both usually mean the Screen Recording
// permission has not been granted.
async function capture(options) {
  const display = options.display ?? 1;
  const outputPath = resolve(options.outputPath);
  // -x: suppress the capture sound; -D<n>: select display (per screencapture(1)).
  const args = ["-x", `-D${display}`, outputPath];
  log(`Capturing screenshot: display=${display}, output=${outputPath}`);
  // Adapt the callback-style execFile to a promise.
  await new Promise((resolve4, reject) => {
    execFileCb("screencapture", args, { env: SAFE_CHILD_ENV }, (error) => {
      if (error) {
        reject(
          new Error(
            `screencapture failed: ${error.message}
Ensure Screen Recording permission is granted in System Settings \u2192 Privacy & Security \u2192 Screen Recording.`
          )
        );
        return;
      }
      resolve4();
    });
  });
  // screencapture can exit 0 yet write nothing when permission is denied,
  // so verify the file actually exists and is non-empty before returning.
  let fileStats;
  try {
    fileStats = await stat(outputPath);
  } catch {
    throw new Error(
      `Screenshot file not created at ${outputPath}.
Ensure Screen Recording permission is granted in System Settings \u2192 Privacy & Security \u2192 Screen Recording.`
    );
  }
  if (!fileStats.isFile() || fileStats.size === 0) {
    throw new Error(
      "Screenshot captured but file is empty (0 bytes). This typically means Screen Recording permission is not granted.\nGrant permission in System Settings \u2192 Privacy & Security \u2192 Screen Recording."
    );
  }
  log(`Screenshot captured: ${outputPath} (${fileStats.size} bytes)`);
  return outputPath;
}
|
|
76
|
+
|
|
77
|
+
// src/mcp/utils/ImageOptimizer.ts
|
|
78
|
+
import sharp from "sharp";
|
|
79
|
+
import { resolve as resolve2 } from "path";
|
|
80
|
+
var DEFAULT_MAX_WIDTH = 1920;
var DEFAULT_QUALITY = 85;
/**
 * Optimize an image with sharp: downscale to a maximum width (never enlarging)
 * and re-encode as JPEG or PNG based on the output file extension.
 *
 * @param inputPath  Source image path.
 * @param outputPath Destination path; defaults to overwriting inputPath.
 * @param options    Optional { maxWidth, quality } overrides.
 * @returns The resolved absolute output path.
 */
async function optimize(inputPath, outputPath, options) {
  const resolvedInput = resolve2(inputPath);
  const resolvedOutput = resolve2(outputPath ?? inputPath);
  const maxWidth = options?.maxWidth ?? DEFAULT_MAX_WIDTH;
  const quality = options?.quality ?? DEFAULT_QUALITY;
  const metadata = await sharp(resolvedInput).metadata();
  const originalWidth = metadata.width ?? 0;
  const originalHeight = metadata.height ?? 0;
  let pipeline = sharp(resolvedInput);
  if (originalWidth > maxWidth) {
    pipeline = pipeline.resize({ width: maxWidth, withoutEnlargement: true });
    log(
      `Resizing image: ${originalWidth}x${originalHeight} \u2192 max width ${maxWidth}px`
    );
  }
  const ext = resolvedOutput.split(".").pop()?.toLowerCase();
  if (ext === "jpg" || ext === "jpeg") {
    pipeline = pipeline.jpeg({ quality });
  } else {
    pipeline = pipeline.png({ quality: Math.min(quality, 100) });
  }
  // BUG FIX: the default output path IS the input path (in-place optimization,
  // and that is how register() calls this), but sharp's toFile() refuses to
  // write to the same file it is reading from ("Cannot use same file for input
  // and output"). Render to a buffer first, then overwrite the file.
  // `fs` is the module-level `import * as fs from "fs/promises"` (hoisted).
  const optimized = await pipeline.toBuffer();
  await fs.writeFile(resolvedOutput, optimized);
  log(`Image optimized: ${resolvedOutput}`);
  return resolvedOutput;
}
|
|
107
|
+
|
|
108
|
+
// src/mcp/session/SessionStore.ts
|
|
109
|
+
import * as fs from "fs/promises";
|
|
110
|
+
import * as path from "path";
|
|
111
|
+
import * as os from "os";
|
|
112
|
+
// Root directory for all MCP sessions: ~/Documents/markupr/mcp
var BASE_DIR = path.join(os.homedir(), "Documents", "markupr", "mcp");
// Persists session state on disk: one directory per session under baseDir,
// each holding a metadata.json and a screenshots/ subdirectory.
var SessionStore = class {
  // Absolute path of the directory that contains all session directories.
  baseDir;
  constructor(baseDir) {
    // Allow callers/tests to override the storage root; default to BASE_DIR.
    this.baseDir = baseDir ?? BASE_DIR;
  }
  /**
   * Create a new session directory with metadata.json.
   * Also creates an empty screenshots/ subdirectory. If the timestamp-based
   * ID collides with an existing directory (two sessions within the same
   * second), a numeric suffix (-1, -2, ...) is appended until unique.
   */
  async create(label) {
    await fs.mkdir(this.baseDir, { recursive: true });
    const now = Date.now();
    const id = this.formatSessionId(now);
    const sessionDir = path.join(this.baseDir, id);
    let finalDir = sessionDir;
    let counter = 1;
    while (await this.exists(finalDir)) {
      finalDir = path.join(this.baseDir, `${id}-${counter}`);
      counter++;
    }
    // The on-disk directory name is the authoritative session ID.
    const finalId = path.basename(finalDir);
    await fs.mkdir(finalDir, { recursive: true });
    await fs.mkdir(path.join(finalDir, "screenshots"), { recursive: true });
    const session = {
      id: finalId,
      startTime: now,
      label,
      status: "recording"
    };
    const metadata = { ...session };
    await this.writeMetadata(finalDir, metadata);
    log(`Session created: ${finalId}`);
    return session;
  }
  /**
   * Read a session's metadata by ID.
   * Returns null when metadata.json is missing or unparseable.
   */
  async get(id) {
    const metadataPath = path.join(this.baseDir, id, "metadata.json");
    try {
      const raw = await fs.readFile(metadataPath, "utf-8");
      return JSON.parse(raw);
    } catch {
      return null;
    }
  }
  /**
   * Return the most recent session (by startTime).
   */
  async getLatest() {
    const sessions = await this.list();
    // list() sorts descending, so the first entry is the newest.
    return sessions[0] ?? null;
  }
  /**
   * List all sessions sorted by startTime descending.
   * Directories without a readable metadata.json are silently skipped.
   */
  async list() {
    const sessions = [];
    try {
      await fs.access(this.baseDir);
    } catch {
      // Base directory doesn't exist yet: no sessions.
      return sessions;
    }
    let entries;
    try {
      entries = await fs.readdir(this.baseDir, { withFileTypes: true });
    } catch {
      return sessions;
    }
    for (const entry of entries) {
      if (!entry.isDirectory()) continue;
      const metadataPath = path.join(this.baseDir, entry.name, "metadata.json");
      try {
        const raw = await fs.readFile(metadataPath, "utf-8");
        const session = JSON.parse(raw);
        sessions.push(session);
      } catch {
        // Skip directories whose metadata is missing or corrupt.
      }
    }
    sessions.sort((a, b) => b.startTime - a.startTime);
    return sessions;
  }
  /**
   * Update a session's metadata (partial merge).
   * Throws when the session does not exist.
   */
  async update(id, data) {
    const sessionDir = path.join(this.baseDir, id);
    const metadataPath = path.join(sessionDir, "metadata.json");
    let existing;
    try {
      const raw = await fs.readFile(metadataPath, "utf-8");
      existing = JSON.parse(raw);
    } catch {
      throw new Error(`Session not found: ${id}`);
    }
    // Shallow merge: fields in `data` overwrite existing top-level fields.
    const updated = { ...existing, ...data };
    await this.writeMetadata(sessionDir, updated);
    log(`Session updated: ${id}`);
  }
  /**
   * Get the absolute path to a session directory.
   */
  getSessionDir(id) {
    return path.join(this.baseDir, id);
  }
  /**
   * Format a timestamp into the session ID pattern: mcp-YYYYMMDD-HHMMSS
   * (local time, zero-padded fields).
   */
  formatSessionId(timestamp) {
    const date = new Date(timestamp);
    const dateStr = [
      date.getFullYear(),
      String(date.getMonth() + 1).padStart(2, "0"),
      String(date.getDate()).padStart(2, "0")
    ].join("");
    const timeStr = [
      String(date.getHours()).padStart(2, "0"),
      String(date.getMinutes()).padStart(2, "0"),
      String(date.getSeconds()).padStart(2, "0")
    ].join("");
    return `mcp-${dateStr}-${timeStr}`;
  }
  // Serialize metadata as pretty-printed JSON to <sessionDir>/metadata.json.
  async writeMetadata(sessionDir, metadata) {
    const metadataPath = path.join(sessionDir, "metadata.json");
    await fs.writeFile(metadataPath, JSON.stringify(metadata, null, 2), "utf-8");
  }
  // True when the given path exists (fs.access succeeds).
  async exists(dir) {
    try {
      await fs.access(dir);
      return true;
    } catch {
      return false;
    }
  }
};
// Shared singleton used by the MCP tool registrations below.
var sessionStore = new SessionStore();
|
|
248
|
+
|
|
249
|
+
// src/mcp/tools/captureScreenshot.ts
|
|
250
|
+
// Register the `capture_screenshot` MCP tool on the given server.
// The tool creates a session, shells out to macOS screencapture, optionally
// optimizes the image, and returns the saved path plus a markdown image ref.
function register(server) {
  server.tool(
    "capture_screenshot",
    "Take a screenshot of the current screen, optimize it, and save to the session directory. Returns a markdown image reference.",
    {
      label: z.string().optional().describe("Optional label for the screenshot"),
      display: z.number().optional().default(1).describe("Display number (1-indexed)"),
      optimize: z.boolean().optional().default(true).describe("Optimize image size with sharp")
    },
    async ({ label, display, optimize: shouldOptimize }) => {
      try {
        const session = await sessionStore.create(label);
        const sessionDir = sessionStore.getSessionDir(session.id);
        const screenshotsDir = join2(sessionDir, "screenshots");
        // Next sequential index based on screenshots already in the session.
        const existing = await readdir2(screenshotsDir).catch(() => []);
        const index = existing.filter((f) => f.startsWith("screenshot-")).length + 1;
        const filename = `screenshot-${String(index).padStart(3, "0")}.png`;
        const outputPath = join2(screenshotsDir, filename);
        log(`Capturing screenshot: display=${display}, label=${label ?? "none"}`);
        await capture({ display, outputPath });
        if (shouldOptimize) {
          await optimize(outputPath);
        }
        // BUG FIX: markdownRef was an empty template literal, so the tool never
        // returned the markdown image reference its description promises.
        // Build it from the label (alt text) and the saved file path.
        const markdownRef = `![${label ?? "Screenshot"}](${outputPath})`;
        return {
          content: [
            {
              type: "text",
              text: `Screenshot saved: ${outputPath}
${markdownRef}`
            }
          ]
        };
      } catch (error) {
        // Surface failures as MCP error content rather than throwing.
        return {
          content: [{ type: "text", text: `Error: ${error.message}` }],
          isError: true
        };
      }
    }
  );
}
|
|
292
|
+
|
|
293
|
+
// src/mcp/tools/captureWithVoice.ts
|
|
294
|
+
import { z as z2 } from "zod";
|
|
295
|
+
import { join as join6 } from "path";
|
|
296
|
+
|
|
297
|
+
// src/mcp/capture/ScreenRecorder.ts
|
|
298
|
+
import { execFile as execFileCb2 } from "child_process";
|
|
299
|
+
import { stat as stat2 } from "fs/promises";
|
|
300
|
+
import { resolve as resolve3 } from "path";
|
|
301
|
+
// Restricted environment for the ffmpeg child processes below — same shape as
// SAFE_CHILD_ENV: pass through only the essentials, with Windows fallbacks.
var SAFE_CHILD_ENV2 = (function (env) {
  return {
    PATH: env.PATH,
    HOME: env.HOME || env.USERPROFILE,
    USERPROFILE: env.USERPROFILE,
    LANG: env.LANG,
    TMPDIR: env.TMPDIR || env.TEMP,
    TEMP: env.TEMP
  };
})(process.env);
|
|
309
|
+
// Build the ffmpeg argv for an avfoundation screen+audio capture.
// Input is "<videoDevice>:<audioDevice>"; output is H.264 + AAC at 10 fps.
// When `duration` is given, a -t limit is added; -y always overwrites output.
function buildFfmpegArgs(outputPath, videoDevice, audioDevice, duration) {
  const argv = [
    "-f", "avfoundation",
    "-framerate", "10",
    "-i", `${videoDevice}:${audioDevice}`,
    "-vcodec", "libx264",
    "-preset", "ultrafast",
    "-acodec", "aac",
    "-strict", "experimental"
  ];
  if (duration !== undefined) {
    argv.push("-t", String(duration));
  }
  argv.push("-y", outputPath);
  return argv;
}
|
|
333
|
+
// Record screen + audio with ffmpeg for an optional fixed duration, then
// validate the output file. options: { outputPath, duration?, videoDevice?,
// audioDevice? }. Returns the resolved output path; throws on ffmpeg failure
// or a missing/empty output file.
async function record(options) {
  const { duration } = options;
  const outputPath = resolve3(options.outputPath);
  const videoDevice = options.videoDevice ?? "1";
  const audioDevice = options.audioDevice ?? "default";
  const args = buildFfmpegArgs(outputPath, videoDevice, audioDevice, duration);
  log(`Recording screen+audio: duration=${duration}s, output=${outputPath}`);
  // BUG FIX: `duration` is optional (buildFfmpegArgs explicitly handles
  // undefined), but `(duration + 30) * 1e3` is NaN when it is undefined —
  // Node rejects a NaN `timeout` option. Only set a timeout when bounded.
  const execOptions = duration !== void 0
    ? { env: SAFE_CHILD_ENV2, timeout: (duration + 30) * 1e3 }
    : { env: SAFE_CHILD_ENV2 };
  await new Promise((resolve4, reject) => {
    execFileCb2(
      "ffmpeg",
      args,
      execOptions,
      (error) => {
        if (error) {
          reject(
            new Error(
              `Screen recording failed: ${error.message}
Ensure ffmpeg is installed and Screen Recording + Microphone permissions are granted.`
            )
          );
          return;
        }
        resolve4();
      }
    );
  });
  await validateOutputFile(outputPath);
  log(`Screen recording complete: ${outputPath}`);
  return outputPath;
}
|
|
363
|
+
// Begin an open-ended (no -t limit) ffmpeg recording and return the child
// process handle so the caller can later stop() it. Errors after a deliberate
// kill (error.killed) are expected and not logged.
function start(options) {
  const outputPath = resolve3(options.outputPath);
  const videoDevice = options.videoDevice ?? "1";
  const audioDevice = options.audioDevice ?? "default";
  const ffmpegArgs = buildFfmpegArgs(outputPath, videoDevice, audioDevice);
  log(`Starting long-form recording: output=${outputPath}`);
  const onDone = (error) => {
    if (error && !error.killed) {
      log(`Recording process exited with error: ${error.message}`);
    }
  };
  return execFileCb2("ffmpeg", ffmpegArgs, { env: SAFE_CHILD_ENV2 }, onDone);
}
|
|
381
|
+
// Gracefully stop a recording child process: send SIGINT so ffmpeg finalizes
// the file, escalate to SIGKILL after 10s, and resolve once the process exits.
async function stop(process2) {
  // BUG FIX: the original only checked exitCode, but a process terminated by
  // a signal has exitCode === null and signalCode set; waiting for its "exit"
  // event (already emitted) would then hang forever. Treat either a non-null
  // exitCode or a non-null signalCode as "already exited".
  if (process2.exitCode !== null || process2.signalCode !== null) {
    log("Recording process already exited");
    return;
  }
  log("Stopping recording (SIGINT \u2192 ffmpeg)...");
  return new Promise((resolve4, reject) => {
    const forceKillTimeout = setTimeout(() => {
      log("Force-killing recording process (10s timeout exceeded)");
      process2.kill("SIGKILL");
    }, 1e4);
    process2.once("exit", (code) => {
      clearTimeout(forceKillTimeout);
      log(`Recording process exited with code ${code}`);
      resolve4();
    });
    process2.once("error", (err) => {
      clearTimeout(forceKillTimeout);
      reject(new Error(`Error stopping recording: ${err.message}`));
    });
    process2.kill("SIGINT");
  });
}
|
|
404
|
+
// Verify a recording was actually written: the path must exist, be a regular
// file, and be non-empty. Throws a permission-hinting error otherwise
// (ffmpeg can fail silently when macOS privacy permissions are missing).
async function validateOutputFile(outputPath) {
  let info;
  try {
    info = await stat2(outputPath);
  } catch {
    throw new Error(
      `Recording file not created at ${outputPath}.
Check Screen Recording and Microphone permissions in System Settings \u2192 Privacy & Security.`
    );
  }
  const isUsable = info.isFile() && info.size > 0;
  if (!isUsable) {
    throw new Error(
      "Recording file is empty (0 bytes). Permissions may not be granted.\nCheck System Settings \u2192 Privacy & Security \u2192 Screen Recording and Microphone."
    );
  }
}
|
|
420
|
+
|
|
421
|
+
// src/cli/CLIPipeline.ts
|
|
422
|
+
import { existsSync as existsSync3, mkdirSync as mkdirSync2 } from "fs";
|
|
423
|
+
import { stat as stat3, unlink as unlink2, writeFile as writeFile2, chmod as chmod2 } from "fs/promises";
|
|
424
|
+
import { join as join5, basename as basename3 } from "path";
|
|
425
|
+
import { execFile as execFileCb4 } from "child_process";
|
|
426
|
+
import { tmpdir as tmpdir2 } from "os";
|
|
427
|
+
import { randomUUID as randomUUID2 } from "crypto";
|
|
428
|
+
|
|
429
|
+
// src/main/pipeline/TranscriptAnalyzer.ts
|
|
430
|
+
// Tuning constants for transcript key-moment detection (TranscriptAnalyzer).
// Minimum silence gap (seconds) between segments that counts as a "natural pause".
var PAUSE_THRESHOLD_SECONDS = 1.5;
// Target spacing (seconds) for fallback periodic captures when too few moments were found.
var PERIODIC_INTERVAL_SECONDS = 15;
// Upper bound (seconds) on the stretched periodic interval.
var MAX_PERIODIC_INTERVAL_SECONDS = 20;
// Hard cap on how many key moments analyze() returns.
var MAX_KEY_MOMENTS = 20;
// Offset (seconds) applied near session edges so frames are not taken on the very first/last instant.
var FRAME_EDGE_MARGIN_SECONDS = 0.35;
|
|
435
|
+
var TranscriptAnalyzer = class {
  /**
   * Analyze transcript segments and return key moments where frames
   * should be extracted from the video recording.
   *
   * Moments are collected from four sources, in order: session start,
   * natural pauses between consecutive segments, session end, and —
   * only when fewer than 3 moments were found and no AI hints were
   * supplied — evenly spaced periodic captures. AI hints are merged in
   * afterwards, the list is deduplicated within a 1-second window, and
   * the result is capped at MAX_KEY_MOMENTS (keeping first/last plus the
   * highest-priority middle moments).
   *
   * @param segments - Array of transcript segments with timing info
   * @param aiHints - Optional AI-informed key-moment hints to merge
   * @returns Array of key moments sorted by timestamp, capped at 20
   */
  analyze(segments, aiHints = []) {
    if (segments.length === 0) {
      return [];
    }
    const moments = [];
    const firstSegment = segments[0];
    const lastSegment = segments[segments.length - 1];
    const sessionDuration = lastSegment.endTime - firstSegment.startTime;
    // Nudge the start capture slightly inward (unless the session is shorter than the margin).
    const startOffset = sessionDuration > FRAME_EDGE_MARGIN_SECONDS ? FRAME_EDGE_MARGIN_SECONDS : 0;
    moments.push({
      timestamp: firstSegment.startTime + startOffset,
      reason: "Session start",
      confidence: 1
    });
    // A gap between segments long enough to be a deliberate pause is a capture point.
    for (let i = 1; i < segments.length; i++) {
      const prev = segments[i - 1];
      const curr = segments[i];
      const gap = curr.startTime - prev.endTime;
      if (gap >= PAUSE_THRESHOLD_SECONDS) {
        moments.push({
          timestamp: prev.endTime,
          reason: "Natural pause in narration",
          confidence: Math.min(1, gap / 3)
          // Longer pauses = higher confidence
        });
      }
    }
    // Only add a session-end moment if it lands after the (offset) start moment.
    if (lastSegment.endTime > firstSegment.startTime + startOffset) {
      moments.push({
        timestamp: lastSegment.endTime,
        reason: "Session end",
        confidence: 1
      });
    }
    // Fallback: sparse sessions with no AI hints get periodic captures so
    // the report is not left with just start/end frames.
    if (moments.length < 3 && aiHints.length === 0) {
      if (sessionDuration > PERIODIC_INTERVAL_SECONDS) {
        const rawCount = Math.floor(sessionDuration / PERIODIC_INTERVAL_SECONDS);
        const interval = Math.min(
          sessionDuration / rawCount,
          MAX_PERIODIC_INTERVAL_SECONDS
        );
        for (let t = firstSegment.startTime + interval; t < lastSegment.endTime; t += interval) {
          moments.push({
            timestamp: t,
            reason: "Periodic capture",
            confidence: 0.5
          });
        }
      }
    }
    // Merge AI hints: clamp timestamps to >= 0 and confidence into [0, 1];
    // non-finite confidence defaults to 0.8.
    for (const hint of aiHints) {
      if (!Number.isFinite(hint.timestamp)) {
        continue;
      }
      moments.push({
        timestamp: Math.max(0, hint.timestamp),
        reason: hint.reason?.trim() || "AI-highlighted context",
        confidence: Math.max(0, Math.min(1, Number.isFinite(hint.confidence) ? hint.confidence : 0.8))
      });
    }
    const deduped = this.deduplicateMoments(moments);
    deduped.sort((a, b) => a.timestamp - b.timestamp);
    // Cap: always keep the chronologically first and last moments, then fill
    // the middle with the highest-priority (and then highest-confidence) ones.
    if (deduped.length > MAX_KEY_MOMENTS) {
      const first = deduped[0];
      const last = deduped[deduped.length - 1];
      const middle = deduped.slice(1, -1).sort((a, b) => {
        const priorityDelta = this.momentPriority(b) - this.momentPriority(a);
        if (priorityDelta !== 0) {
          return priorityDelta;
        }
        return b.confidence - a.confidence;
      }).slice(0, MAX_KEY_MOMENTS - 2);
      const capped = [first, ...middle, last];
      capped.sort((a, b) => a.timestamp - b.timestamp);
      return capped;
    }
    return deduped;
  }
  /**
   * Remove moments that are within 1 second of each other,
   * keeping the one with higher confidence.
   * (Priority wins first; confidence breaks priority ties.)
   */
  deduplicateMoments(moments) {
    if (moments.length <= 1) {
      return moments;
    }
    // Work on a sorted copy so the 1-second window check is a single pass.
    const sorted = [...moments].sort((a, b) => a.timestamp - b.timestamp);
    const result = [sorted[0]];
    for (let i = 1; i < sorted.length; i++) {
      const prev = result[result.length - 1];
      const curr = sorted[i];
      if (curr.timestamp - prev.timestamp < 1) {
        const currPriority = this.momentPriority(curr);
        const prevPriority = this.momentPriority(prev);
        // Replace the kept moment only when the newcomer outranks it.
        if (currPriority > prevPriority || currPriority === prevPriority && curr.confidence > prev.confidence) {
          result[result.length - 1] = curr;
        }
      } else {
        result.push(curr);
      }
    }
    return result;
  }
  /**
   * Rank a moment by the kind of event it records, based on its reason text:
   * session boundaries (4) > AI hints (3) > natural pauses (2) >
   * anything else (1) > periodic fillers (0).
   */
  momentPriority(moment) {
    const reason = (moment.reason || "").toLowerCase();
    if (reason.includes("session start") || reason.includes("session end")) {
      return 4;
    }
    if (reason.includes("ai-") || reason.includes("ai ")) {
      return 3;
    }
    if (reason.includes("natural pause")) {
      return 2;
    }
    if (reason.includes("periodic")) {
      return 0;
    }
    return 1;
  }
};
|
|
564
|
+
var transcriptAnalyzer = new TranscriptAnalyzer();
|
|
565
|
+
|
|
566
|
+
// src/main/pipeline/FrameExtractor.ts
|
|
567
|
+
import { execFile as execFileCb3 } from "child_process";
|
|
568
|
+
import { promisify } from "util";
|
|
569
|
+
import { existsSync, mkdirSync } from "fs";
|
|
570
|
+
import { stat as statFile } from "fs/promises";
|
|
571
|
+
import { join as join3 } from "path";
|
|
572
|
+
// Promisified execFile used for all ffmpeg/ffprobe invocations below.
var execFile = promisify(execFileCb3);
// Default cap on how many frames extract() will produce per request.
var DEFAULT_MAX_FRAMES = 20;
// Timeouts (ms) for the two seek strategies and the availability probe.
var FFMPEG_ACCURATE_FRAME_TIMEOUT_MS = 2e4;
var FFMPEG_FAST_FRAME_TIMEOUT_MS = 1e4;
var FFMPEG_CHECK_TIMEOUT_MS = 5e3;
// Keep requested timestamps this many seconds away from the clip edges.
var FRAME_EDGE_MARGIN_SECONDS2 = 0.35;
// Timestamps closer together than this window are collapsed to one frame.
var TIMESTAMP_DEDUPE_WINDOW_SECONDS = 0.15;
// Minimal environment passed to child processes — only the variables ffmpeg
// needs, rather than the full (potentially sensitive) parent environment.
var SAFE_CHILD_ENV3 = {
  PATH: process.env.PATH,
  HOME: process.env.HOME || process.env.USERPROFILE,
  USERPROFILE: process.env.USERPROFILE,
  LANG: process.env.LANG,
  TMPDIR: process.env.TMPDIR || process.env.TEMP,
  TEMP: process.env.TEMP
};
|
|
587
|
+
var FrameExtractor = class {
  // Executable names resolved via PATH (overridable per instance).
  ffmpegPath = "ffmpeg";
  ffprobePath = "ffprobe";
  // Availability probe result cache — checkFfmpeg() only shells out once.
  ffmpegChecked = false;
  ffmpegAvailable = false;
  /**
   * Check if ffmpeg is installed and accessible on the system PATH.
   * Result is cached after the first successful check.
   */
  async checkFfmpeg() {
    if (this.ffmpegChecked) {
      return this.ffmpegAvailable;
    }
    try {
      // `ffmpeg -version` exits 0 when the binary is runnable.
      await execFile(this.ffmpegPath, ["-version"], {
        timeout: FFMPEG_CHECK_TIMEOUT_MS,
        env: SAFE_CHILD_ENV3
      });
      this.ffmpegAvailable = true;
      this.log("ffmpeg is available");
    } catch {
      // Deliberate best-effort: a missing binary disables extraction, not the app.
      this.ffmpegAvailable = false;
      this.log("ffmpeg is not available - frame extraction will be skipped");
    }
    this.ffmpegChecked = true;
    return this.ffmpegAvailable;
  }
  /**
   * Extract frames from a video file at the specified timestamps.
   *
   * Timestamps are sorted, thinned to maxFrames, clamped away from the clip
   * edges (when the duration can be probed), and de-duplicated before
   * extraction. Per-frame failures are recorded (success: false) rather
   * than aborting the whole batch.
   *
   * @param request - Extraction parameters (video path, timestamps, output dir)
   * @returns Result with extracted frame paths and ffmpeg availability status
   */
  async extract(request) {
    const available = await this.checkFfmpeg();
    if (!available) {
      return { frames: [], ffmpegAvailable: false };
    }
    const maxFrames = request.maxFrames ?? DEFAULT_MAX_FRAMES;
    let timestamps = [...request.timestamps].sort((a, b) => a - b);
    if (timestamps.length > maxFrames) {
      timestamps = this.selectDistributed(timestamps, maxFrames);
    }
    const videoDurationSeconds = await this.getVideoDurationSeconds(request.videoPath);
    timestamps = this.normalizeTimestamps(timestamps, videoDurationSeconds);
    if (timestamps.length === 0) {
      return { frames: [], ffmpegAvailable: true };
    }
    const screenshotsDir = join3(request.outputDir, "screenshots");
    if (!existsSync(screenshotsDir)) {
      mkdirSync(screenshotsDir, { recursive: true });
    }
    const frames = [];
    for (let i = 0; i < timestamps.length; i++) {
      const timestamp = timestamps[i];
      // Frames are numbered frame-001.png, frame-002.png, ...
      const frameNumber = String(i + 1).padStart(3, "0");
      const outputPath = join3(screenshotsDir, `frame-${frameNumber}.png`);
      try {
        await this.extractSingleFrame(request.videoPath, timestamp, outputPath);
        // ffmpeg can exit 0 yet write nothing (e.g. seek past EOF) — verify the file.
        const stats = await statFile(outputPath).catch(() => null);
        if (!stats || stats.size <= 0) {
          throw new Error(`ffmpeg did not produce a frame file at timestamp ${timestamp.toFixed(1)}s. The video may be shorter than expected or the codec may not support seeking.`);
        }
        frames.push({
          path: outputPath,
          timestamp,
          success: true
        });
        this.log(`Extracted frame ${frameNumber} at ${timestamp.toFixed(2)}s`);
      } catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        this.log(`Failed to extract frame at ${timestamp.toFixed(2)}s: ${message}`);
        // Record the failure so callers can see which timestamps were missed.
        frames.push({
          path: outputPath,
          timestamp,
          success: false
        });
      }
    }
    return { frames, ffmpegAvailable: true };
  }
  // ============================================================================
  // Private Methods
  // ============================================================================
  /**
   * Extract a single frame from the video at the given timestamp.
   * Tries accurate (decode-to-seek) extraction first, then falls back to
   * fast keyframe seeking if that fails.
   */
  async extractSingleFrame(videoPath, timestamp, outputPath) {
    try {
      await this.extractSingleFrameAccurate(videoPath, timestamp, outputPath);
      return;
    } catch (accurateError) {
      this.log(
        `Accurate extraction failed at ${timestamp.toFixed(2)}s, retrying fast seek: ${accurateError instanceof Error ? accurateError.message : String(accurateError)}`
      );
    }
    await this.extractSingleFrameFast(videoPath, timestamp, outputPath);
  }
  /**
   * Accurate seek: `-ss` AFTER `-i` decodes up to the timestamp — slower
   * but frame-exact. Output is one PNG (rgb24, high quality).
   */
  async extractSingleFrameAccurate(videoPath, timestamp, outputPath) {
    const args = [
      "-i",
      videoPath,
      "-ss",
      String(timestamp),
      "-frames:v",
      "1",
      "-vf",
      "format=rgb24",
      "-q:v",
      "2",
      "-y",
      outputPath
    ];
    await execFile(this.ffmpegPath, args, {
      timeout: FFMPEG_ACCURATE_FRAME_TIMEOUT_MS,
      env: SAFE_CHILD_ENV3
    });
  }
  /**
   * Fast seek: `-ss` BEFORE `-i` jumps to the nearest keyframe — may be
   * slightly off the requested timestamp but is much cheaper.
   */
  async extractSingleFrameFast(videoPath, timestamp, outputPath) {
    const args = [
      "-ss",
      String(timestamp),
      "-i",
      videoPath,
      "-frames:v",
      "1",
      "-vf",
      "format=rgb24",
      "-q:v",
      "2",
      "-y",
      // overwrite output file if it exists
      outputPath
    ];
    await execFile(this.ffmpegPath, args, {
      timeout: FFMPEG_FAST_FRAME_TIMEOUT_MS,
      env: SAFE_CHILD_ENV3
    });
  }
  /**
   * Select evenly distributed timestamps from a sorted array.
   * Always includes the first and last timestamp.
   */
  selectDistributed(sorted, count) {
    if (sorted.length <= count) {
      return sorted;
    }
    if (count <= 0) {
      return [];
    }
    if (count === 1) {
      return [sorted[0]];
    }
    const result = [sorted[0]];
    // Fractional stride over the index range; endpoints handled explicitly.
    const step = (sorted.length - 1) / (count - 1);
    for (let i = 1; i < count - 1; i++) {
      const index = Math.round(i * step);
      result.push(sorted[index]);
    }
    result.push(sorted[sorted.length - 1]);
    return result;
  }
  /**
   * Probe the container duration via ffprobe.
   * Returns the duration in seconds, or null when the probe fails or
   * yields a non-positive/non-numeric value.
   */
  async getVideoDurationSeconds(videoPath) {
    try {
      const { stdout } = await execFile(
        this.ffprobePath,
        [
          "-v",
          "error",
          "-show_entries",
          "format=duration",
          "-of",
          "default=noprint_wrappers=1:nokey=1",
          videoPath
        ],
        { timeout: FFMPEG_CHECK_TIMEOUT_MS, env: SAFE_CHILD_ENV3 }
      );
      const parsed = Number.parseFloat(String(stdout).trim());
      if (Number.isFinite(parsed) && parsed > 0) {
        return parsed;
      }
      return null;
    } catch (error) {
      // Best-effort: callers treat null as "duration unknown, skip clamping".
      this.log(`ffprobe duration probe failed: ${error instanceof Error ? error.message : String(error)}`);
      return null;
    }
  }
  /**
   * Sanitize a list of timestamps: replace non-finite values with 0, sort,
   * clamp into [margin, duration - margin] when the duration is known, and
   * drop values within TIMESTAMP_DEDUPE_WINDOW_SECONDS of their predecessor.
   */
  normalizeTimestamps(timestamps, durationSeconds) {
    const cleaned = timestamps.map((timestamp) => Number.isFinite(timestamp) ? Math.max(0, timestamp) : 0).sort((a, b) => a - b);
    if (cleaned.length === 0) {
      return [];
    }
    let clamped = cleaned;
    if (durationSeconds && durationSeconds > 0) {
      // For very short clips the margin collapses toward the clip length.
      const minTs = Math.min(FRAME_EDGE_MARGIN_SECONDS2, Math.max(0, durationSeconds - 0.05));
      const maxTs = Math.max(minTs, durationSeconds - FRAME_EDGE_MARGIN_SECONDS2);
      clamped = cleaned.map((timestamp) => Math.max(minTs, Math.min(timestamp, maxTs)));
    }
    const deduped = [];
    for (const timestamp of clamped) {
      const previous = deduped[deduped.length - 1];
      if (previous === void 0 || Math.abs(timestamp - previous) >= TIMESTAMP_DEDUPE_WINDOW_SECONDS) {
        deduped.push(timestamp);
      }
    }
    return deduped;
  }
  /**
   * Log helper with consistent prefix.
   */
  log(message) {
    const timestamp = (/* @__PURE__ */ new Date()).toISOString();
    console.log(`[FrameExtractor ${timestamp}] ${message}`);
  }
};
|
|
802
|
+
var frameExtractor = new FrameExtractor();
|
|
803
|
+
|
|
804
|
+
// src/main/output/MarkdownGenerator.ts
|
|
805
|
+
import * as path2 from "path";
|
|
806
|
+
var REPORT_SUPPORT_LINE = "*If this report saved you time, support development: [Ko-fi](https://ko-fi.com/eddiesanjuan)*";
|
|
807
|
+
var MarkdownGeneratorImpl = class {
|
|
808
|
+
/**
|
|
809
|
+
* Generate a full markdown document with all feedback items and metadata.
|
|
810
|
+
* Follows llms.txt-inspired format for AI readability.
|
|
811
|
+
*/
|
|
812
|
+
generateFullDocument(session, options) {
|
|
813
|
+
const { projectName, screenshotDir } = options;
|
|
814
|
+
const items = session.feedbackItems;
|
|
815
|
+
const duration = session.endTime ? this.formatDuration(session.endTime - session.startTime) : "In Progress";
|
|
816
|
+
const timestamp = this.formatTimestamp(session.endTime || Date.now());
|
|
817
|
+
const filename = this.generateFilename(projectName, session.startTime);
|
|
818
|
+
if (items.length === 0) {
|
|
819
|
+
const content2 = `# ${projectName} Feedback Report
|
|
820
|
+
> Generated by markupr on ${timestamp}
|
|
821
|
+
> Duration: ${duration}
|
|
822
|
+
|
|
823
|
+
_No feedback items were captured during this session._
|
|
824
|
+
|
|
825
|
+
---
|
|
826
|
+
*Generated by [markupr](https://markupr.com)*
|
|
827
|
+
${REPORT_SUPPORT_LINE}
|
|
828
|
+
`;
|
|
829
|
+
return {
|
|
830
|
+
content: content2,
|
|
831
|
+
filename,
|
|
832
|
+
metadata: {
|
|
833
|
+
itemCount: 0,
|
|
834
|
+
screenshotCount: 0,
|
|
835
|
+
duration: session.endTime ? session.endTime - session.startTime : 0,
|
|
836
|
+
types: {}
|
|
837
|
+
}
|
|
838
|
+
};
|
|
839
|
+
}
|
|
840
|
+
const typeCounts = this.countTypes(items);
|
|
841
|
+
const severityCounts = this.countSeverities(items);
|
|
842
|
+
const screenshotCount = this.countScreenshots(items);
|
|
843
|
+
const topThemes = this.extractTopThemes(items);
|
|
844
|
+
const highImpactCount = (severityCounts.Critical || 0) + (severityCounts.High || 0);
|
|
845
|
+
const platform = session.metadata?.os || process?.platform || "Unknown";
|
|
846
|
+
let content = `# ${projectName} Feedback Report
|
|
847
|
+
> Generated by markupr on ${timestamp}
|
|
848
|
+
> Duration: ${duration} | Items: ${items.length} | Screenshots: ${screenshotCount}
|
|
849
|
+
|
|
850
|
+
## Session Overview
|
|
851
|
+
- **Session ID:** \`${session.id}\`
|
|
852
|
+
- **Source:** ${session.metadata?.sourceName || "Unknown"} (${session.metadata?.sourceType || "screen"})
|
|
853
|
+
- **Platform:** ${platform}
|
|
854
|
+
- **Segments:** ${items.length}
|
|
855
|
+
- **High-impact items:** ${highImpactCount}
|
|
856
|
+
|
|
857
|
+
---
|
|
858
|
+
|
|
859
|
+
## Executive Summary
|
|
860
|
+
|
|
861
|
+
- ${items.length} total feedback items were captured.
|
|
862
|
+
- ${highImpactCount} items are categorized as **Critical** or **High** priority.
|
|
863
|
+
- ${screenshotCount} screenshots were aligned to spoken context.
|
|
864
|
+
`;
|
|
865
|
+
if (topThemes.length > 0) {
|
|
866
|
+
content += `- Top themes: ${topThemes.join(", ")}.
|
|
867
|
+
`;
|
|
868
|
+
}
|
|
869
|
+
content += `
|
|
870
|
+
---
|
|
871
|
+
|
|
872
|
+
## Actionable Feedback
|
|
873
|
+
|
|
874
|
+
`;
|
|
875
|
+
items.forEach((item, index) => {
|
|
876
|
+
const id = this.generateFeedbackItemId(index);
|
|
877
|
+
const title = item.title || this.generateTitle(item.transcription);
|
|
878
|
+
const itemTimestamp = this.formatItemTimestamp(item.timestamp - session.startTime);
|
|
879
|
+
const category = item.category || "General";
|
|
880
|
+
const severity = item.severity || this.defaultSeverityForCategory(category);
|
|
881
|
+
const signals = item.keywords?.slice(0, 5) || [];
|
|
882
|
+
const suggestedAction = this.suggestAction(category, severity, item.transcription);
|
|
883
|
+
content += `### ${id}: ${title}
|
|
884
|
+
- **Severity:** ${severity}
|
|
885
|
+
- **Type:** ${category}
|
|
886
|
+
- **Timestamp:** ${itemTimestamp}
|
|
887
|
+
`;
|
|
888
|
+
if (signals.length > 0) {
|
|
889
|
+
content += `- **Signals:** ${signals.join(", ")}
|
|
890
|
+
`;
|
|
891
|
+
}
|
|
892
|
+
content += `
|
|
893
|
+
#### What Happened
|
|
894
|
+
|
|
895
|
+
> ${this.wrapTranscription(item.transcription)}
|
|
896
|
+
|
|
897
|
+
`;
|
|
898
|
+
if (item.screenshots.length > 0) {
|
|
899
|
+
content += `#### Evidence
|
|
900
|
+
`;
|
|
901
|
+
item.screenshots.forEach((ss, ssIndex) => {
|
|
902
|
+
const screenshotFilename = this.generateScreenshotFilename(index, ssIndex, item.screenshots.length);
|
|
903
|
+
content += `
|
|
904
|
+
|
|
905
|
+
`;
|
|
906
|
+
});
|
|
907
|
+
} else {
|
|
908
|
+
content += `#### Evidence
|
|
909
|
+
_No screenshot captured for this item._
|
|
910
|
+
|
|
911
|
+
`;
|
|
912
|
+
}
|
|
913
|
+
content += `#### Suggested Next Step
|
|
914
|
+
- ${suggestedAction}
|
|
915
|
+
|
|
916
|
+
`;
|
|
917
|
+
content += `---
|
|
918
|
+
|
|
919
|
+
`;
|
|
920
|
+
});
|
|
921
|
+
content += `## Summary
|
|
922
|
+
|
|
923
|
+
| Type | Count |
|
|
924
|
+
|------|-------|
|
|
925
|
+
`;
|
|
926
|
+
Object.entries(typeCounts).forEach(([type, count]) => {
|
|
927
|
+
content += `| ${type} | ${count} |
|
|
928
|
+
`;
|
|
929
|
+
});
|
|
930
|
+
content += `| **Total** | **${items.length}** |
|
|
931
|
+
`;
|
|
932
|
+
content += `
|
|
933
|
+
| Severity | Count |
|
|
934
|
+
|----------|-------|
|
|
935
|
+
`;
|
|
936
|
+
Object.entries(severityCounts).forEach(([severity, count]) => {
|
|
937
|
+
content += `| ${severity} | ${count} |
|
|
938
|
+
`;
|
|
939
|
+
});
|
|
940
|
+
content += `| **Total** | **${items.length}** |
|
|
941
|
+
`;
|
|
942
|
+
content += `
|
|
943
|
+
---
|
|
944
|
+
*Generated by [markupr](https://markupr.com)*
|
|
945
|
+
${REPORT_SUPPORT_LINE}
|
|
946
|
+
`;
|
|
947
|
+
return {
|
|
948
|
+
content,
|
|
949
|
+
filename,
|
|
950
|
+
metadata: {
|
|
951
|
+
itemCount: items.length,
|
|
952
|
+
screenshotCount,
|
|
953
|
+
duration: session.endTime ? session.endTime - session.startTime : 0,
|
|
954
|
+
types: typeCounts
|
|
955
|
+
}
|
|
956
|
+
};
|
|
957
|
+
}
|
|
958
|
+
/**
|
|
959
|
+
* Generate markdown from a PostProcessResult (post-recording pipeline output).
|
|
960
|
+
*
|
|
961
|
+
* Produces a clean, AI-readable document with:
|
|
962
|
+
* - Session header with human-readable timestamp
|
|
963
|
+
* - Each transcript segment as a heading with [M:SS] timestamp
|
|
964
|
+
* - Blockquoted transcript text
|
|
965
|
+
* - Associated frame images referenced as relative paths
|
|
966
|
+
*
|
|
967
|
+
* @param result - The combined transcript + frame output from PostProcessor
|
|
968
|
+
* @param sessionDir - Absolute path to the session directory (used to compute relative frame paths)
|
|
969
|
+
* @returns The generated markdown string
|
|
970
|
+
*/
|
|
971
|
+
generateFromPostProcess(result, sessionDir) {
|
|
972
|
+
const { transcriptSegments, extractedFrames } = result;
|
|
973
|
+
const sessionTimestamp = this.formatDateDeterministic(/* @__PURE__ */ new Date());
|
|
974
|
+
const sessionDuration = transcriptSegments.length > 0 ? this.formatDuration(
|
|
975
|
+
(transcriptSegments[transcriptSegments.length - 1].endTime - transcriptSegments[0].startTime) * 1e3
|
|
976
|
+
) : "0:00";
|
|
977
|
+
let md = `# markupr Session \u2014 ${sessionTimestamp}
|
|
978
|
+
`;
|
|
979
|
+
md += `> Segments: ${transcriptSegments.length} | Frames: ${extractedFrames.length} | Duration: ${sessionDuration}
|
|
980
|
+
|
|
981
|
+
`;
|
|
982
|
+
if (transcriptSegments.length === 0) {
|
|
983
|
+
md += `_No speech was detected during this recording._
|
|
984
|
+
`;
|
|
985
|
+
return md;
|
|
986
|
+
}
|
|
987
|
+
md += `## Transcript
|
|
988
|
+
|
|
989
|
+
`;
|
|
990
|
+
const segmentFrameMap = this.mapFramesToSegments(transcriptSegments, extractedFrames);
|
|
991
|
+
for (let i = 0; i < transcriptSegments.length; i++) {
|
|
992
|
+
const segment = transcriptSegments[i];
|
|
993
|
+
const formattedTime = this.formatPostProcessTimestamp(segment.startTime);
|
|
994
|
+
const title = this.generateSegmentTitle(segment.text);
|
|
995
|
+
md += `### [${formattedTime}] ${title}
|
|
996
|
+
`;
|
|
997
|
+
md += `> ${this.wrapTranscription(segment.text)}
|
|
998
|
+
|
|
999
|
+
`;
|
|
1000
|
+
const frames = segmentFrameMap.get(i);
|
|
1001
|
+
if (frames && frames.length > 0) {
|
|
1002
|
+
for (const frame of frames) {
|
|
1003
|
+
const frameTimestamp = this.formatPostProcessTimestamp(frame.timestamp);
|
|
1004
|
+
const relativePath = this.computeRelativeFramePath(frame.path, sessionDir);
|
|
1005
|
+
md += `
|
|
1006
|
+
|
|
1007
|
+
`;
|
|
1008
|
+
}
|
|
1009
|
+
}
|
|
1010
|
+
}
|
|
1011
|
+
md += `---
|
|
1012
|
+
*Generated by [markupr](https://markupr.com)*
|
|
1013
|
+
${REPORT_SUPPORT_LINE}
|
|
1014
|
+
`;
|
|
1015
|
+
return md;
|
|
1016
|
+
}
|
|
1017
|
+
/**
|
|
1018
|
+
* Map extracted frames to their closest transcript segments.
|
|
1019
|
+
* Returns a Map from segment index to an array of frames.
|
|
1020
|
+
*/
|
|
1021
|
+
mapFramesToSegments(segments, frames) {
|
|
1022
|
+
const map = /* @__PURE__ */ new Map();
|
|
1023
|
+
for (const frame of frames) {
|
|
1024
|
+
let bestIndex = 0;
|
|
1025
|
+
let bestDistance = Infinity;
|
|
1026
|
+
for (let i = 0; i < segments.length; i++) {
|
|
1027
|
+
const seg = segments[i];
|
|
1028
|
+
if (frame.timestamp >= seg.startTime && frame.timestamp <= seg.endTime) {
|
|
1029
|
+
bestIndex = i;
|
|
1030
|
+
bestDistance = 0;
|
|
1031
|
+
break;
|
|
1032
|
+
}
|
|
1033
|
+
const distance = Math.abs(frame.timestamp - seg.startTime);
|
|
1034
|
+
if (distance < bestDistance) {
|
|
1035
|
+
bestDistance = distance;
|
|
1036
|
+
bestIndex = i;
|
|
1037
|
+
}
|
|
1038
|
+
}
|
|
1039
|
+
const existing = map.get(bestIndex) || [];
|
|
1040
|
+
existing.push(frame);
|
|
1041
|
+
map.set(bestIndex, existing);
|
|
1042
|
+
}
|
|
1043
|
+
for (const [, frameList] of map) {
|
|
1044
|
+
frameList.sort((a, b) => a.timestamp - b.timestamp);
|
|
1045
|
+
}
|
|
1046
|
+
return map;
|
|
1047
|
+
}
|
|
1048
|
+
/**
|
|
1049
|
+
* Compute a relative path for a frame image from the session directory.
|
|
1050
|
+
* If the frame path is already relative, return it as-is.
|
|
1051
|
+
* If absolute, compute the relative path from sessionDir.
|
|
1052
|
+
*/
|
|
1053
|
+
computeRelativeFramePath(framePath, sessionDir) {
|
|
1054
|
+
if (!path2.isAbsolute(framePath)) {
|
|
1055
|
+
return framePath;
|
|
1056
|
+
}
|
|
1057
|
+
return path2.relative(sessionDir, framePath);
|
|
1058
|
+
}
|
|
1059
|
+
/**
|
|
1060
|
+
* Format a timestamp in seconds to M:SS format for post-process output.
|
|
1061
|
+
* Examples: 0 -> "0:00", 15.3 -> "0:15", 125 -> "2:05"
|
|
1062
|
+
*/
|
|
1063
|
+
formatPostProcessTimestamp(seconds) {
|
|
1064
|
+
const totalSeconds = Math.max(0, Math.floor(seconds));
|
|
1065
|
+
const mins = Math.floor(totalSeconds / 60);
|
|
1066
|
+
const secs = totalSeconds % 60;
|
|
1067
|
+
return `${mins}:${secs.toString().padStart(2, "0")}`;
|
|
1068
|
+
}
|
|
1069
|
+
/**
|
|
1070
|
+
* Generate a short title from transcript text (first sentence, max 60 chars).
|
|
1071
|
+
*/
|
|
1072
|
+
generateSegmentTitle(text) {
|
|
1073
|
+
const firstSentence = text.split(/[.!?]/)[0].trim();
|
|
1074
|
+
if (firstSentence.length <= 60) return firstSentence;
|
|
1075
|
+
return firstSentence.slice(0, 57) + "...";
|
|
1076
|
+
}
|
|
1077
|
+
/**
 * Generate a clipboard-friendly summary (<1500 chars).
 * Includes priority items and a reference to the full report.
 *
 * @param session - Session data
 * @param projectName - Optional project name for the header
 * @param reportPath - Optional absolute or relative path to the full report file.
 *                     When provided, the summary links to this path instead of the
 *                     generic ./feedback-report.md placeholder.
 */
generateClipboardSummary(session, projectName, reportPath) {
  // Header name falls back: explicit project > recorded source > "Project".
  const name = projectName || session.metadata?.sourceName || "Project";
  const items = session.feedbackItems;
  let summary = `# Feedback: ${name} - ${items.length} items

`;
  // Only the first few items are spelled out; the rest are listed by ID.
  const maxPriorityItems = 3;
  summary += `## Priority Items
`;
  items.slice(0, maxPriorityItems).forEach((item, index) => {
    const id = this.generateFeedbackItemId(index);
    const title = this.generateTitle(item.transcription);
    const oneLineSummary = this.truncateText(item.transcription, 60);
    summary += `- **${id}:** ${title} - ${oneLineSummary}
`;
  });
  if (items.length > maxPriorityItems) {
    // IDs continue from where the priority list stopped (FB-004, ...).
    const remainingIds = items.slice(maxPriorityItems).map((_, i) => this.generateFeedbackItemId(i + maxPriorityItems)).join(", ");
    summary += `
## Other
- ${remainingIds} (see full report)
`;
  }
  summary += `
**Full report:** ${reportPath || "./feedback-report.md"}`;
  // Enforce the clipboard budget with a hard truncation.
  if (summary.length > 1500) {
    summary = summary.slice(0, 1497) + "...";
  }
  return summary;
}
|
|
1117
|
+
/**
|
|
1118
|
+
* Generate a feedback item ID (FB-001, FB-002, etc.)
|
|
1119
|
+
*/
|
|
1120
|
+
generateFeedbackItemId(index) {
|
|
1121
|
+
return `FB-${(index + 1).toString().padStart(3, "0")}`;
|
|
1122
|
+
}
|
|
1123
|
+
// ==========================================================================
|
|
1124
|
+
// Private Helper Methods
|
|
1125
|
+
// ==========================================================================
|
|
1126
|
+
/**
|
|
1127
|
+
* Generate a title from the transcription (first sentence or 50 chars)
|
|
1128
|
+
*/
|
|
1129
|
+
generateTitle(transcription) {
|
|
1130
|
+
const firstSentence = transcription.split(/[.!?]/)[0].trim();
|
|
1131
|
+
if (firstSentence.length <= 50) return firstSentence;
|
|
1132
|
+
return firstSentence.slice(0, 47) + "...";
|
|
1133
|
+
}
|
|
1134
|
+
/**
|
|
1135
|
+
* Truncate text to specified length
|
|
1136
|
+
*/
|
|
1137
|
+
truncateText(text, maxLength) {
|
|
1138
|
+
if (text.length <= maxLength) return text;
|
|
1139
|
+
return text.slice(0, maxLength - 3) + "...";
|
|
1140
|
+
}
|
|
1141
|
+
/**
|
|
1142
|
+
* Wrap transcription for markdown blockquote (handle multi-line).
|
|
1143
|
+
* Splits on sentence-ending punctuation followed by whitespace so that
|
|
1144
|
+
* all multi-sentence inputs (including 2-sentence ones) get proper
|
|
1145
|
+
* blockquote continuation lines.
|
|
1146
|
+
*/
|
|
1147
|
+
wrapTranscription(transcription) {
|
|
1148
|
+
if (!transcription.includes(".") && !transcription.includes("!") && !transcription.includes("?")) {
|
|
1149
|
+
return transcription;
|
|
1150
|
+
}
|
|
1151
|
+
const sentences = transcription.split(/(?<=[.!?])\s+/).map((s) => s.trim()).filter(Boolean);
|
|
1152
|
+
if (sentences.length <= 1) return transcription;
|
|
1153
|
+
return sentences.join("\n> ");
|
|
1154
|
+
}
|
|
1155
|
+
/**
|
|
1156
|
+
* Format duration from milliseconds to M:SS
|
|
1157
|
+
*/
|
|
1158
|
+
formatDuration(ms) {
|
|
1159
|
+
const totalSeconds = Math.floor(ms / 1e3);
|
|
1160
|
+
const mins = Math.floor(totalSeconds / 60);
|
|
1161
|
+
const secs = totalSeconds % 60;
|
|
1162
|
+
return `${mins}:${secs.toString().padStart(2, "0")}`;
|
|
1163
|
+
}
|
|
1164
|
+
/**
|
|
1165
|
+
* Format timestamp to a deterministic human-readable string.
|
|
1166
|
+
* Uses explicit formatting instead of toLocaleString to produce
|
|
1167
|
+
* consistent output across platforms and Node.js versions.
|
|
1168
|
+
*/
|
|
1169
|
+
formatTimestamp(ms) {
|
|
1170
|
+
return this.formatDateDeterministic(new Date(ms));
|
|
1171
|
+
}
|
|
1172
|
+
/**
|
|
1173
|
+
* Produce a deterministic date string: "Feb 14, 2026 at 10:30 AM".
|
|
1174
|
+
* Avoids toLocaleString which can vary across OS versions.
|
|
1175
|
+
*/
|
|
1176
|
+
formatDateDeterministic(date) {
|
|
1177
|
+
const months = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"];
|
|
1178
|
+
const month = months[date.getMonth()];
|
|
1179
|
+
const day = date.getDate();
|
|
1180
|
+
const year = date.getFullYear();
|
|
1181
|
+
const rawHours = date.getHours();
|
|
1182
|
+
const ampm = rawHours >= 12 ? "PM" : "AM";
|
|
1183
|
+
const hours = rawHours % 12 || 12;
|
|
1184
|
+
const minutes = date.getMinutes().toString().padStart(2, "0");
|
|
1185
|
+
return `${month} ${day}, ${year} at ${hours}:${minutes} ${ampm}`;
|
|
1186
|
+
}
|
|
1187
|
+
/**
|
|
1188
|
+
* Format item timestamp as MM:SS from session start
|
|
1189
|
+
*/
|
|
1190
|
+
formatItemTimestamp(ms) {
|
|
1191
|
+
const totalSeconds = Math.max(0, Math.floor(ms / 1e3));
|
|
1192
|
+
const mins = Math.floor(totalSeconds / 60);
|
|
1193
|
+
const secs = totalSeconds % 60;
|
|
1194
|
+
return `${mins.toString().padStart(2, "0")}:${secs.toString().padStart(2, "0")}`;
|
|
1195
|
+
}
|
|
1196
|
+
/**
|
|
1197
|
+
* Generate filename following pattern: {project}-feedback-{YYYYMMDD-HHmmss}.md
|
|
1198
|
+
*/
|
|
1199
|
+
generateFilename(projectName, startTime) {
|
|
1200
|
+
const date = new Date(startTime);
|
|
1201
|
+
const year = date.getFullYear();
|
|
1202
|
+
const month = (date.getMonth() + 1).toString().padStart(2, "0");
|
|
1203
|
+
const day = date.getDate().toString().padStart(2, "0");
|
|
1204
|
+
const hours = date.getHours().toString().padStart(2, "0");
|
|
1205
|
+
const minutes = date.getMinutes().toString().padStart(2, "0");
|
|
1206
|
+
const seconds = date.getSeconds().toString().padStart(2, "0");
|
|
1207
|
+
const dateStr = `${year}${month}${day}`;
|
|
1208
|
+
const timeStr = `${hours}${minutes}${seconds}`;
|
|
1209
|
+
const safeName = projectName.toLowerCase().replace(/[^a-z0-9]/g, "-").replace(/-+/g, "-");
|
|
1210
|
+
return `${safeName}-feedback-${dateStr}-${timeStr}.md`;
|
|
1211
|
+
}
|
|
1212
|
+
/**
|
|
1213
|
+
* Generate screenshot filename for a feedback item.
|
|
1214
|
+
* Uses the item's position index to produce `fb-{NNN}.png`, matching the
|
|
1215
|
+
* naming convention in FileManager.saveSession.
|
|
1216
|
+
*/
|
|
1217
|
+
generateScreenshotFilename(itemIndex, screenshotIndex, total) {
|
|
1218
|
+
const num = (itemIndex + 1).toString().padStart(3, "0");
|
|
1219
|
+
const suffix = total > 1 ? `-${screenshotIndex + 1}` : "";
|
|
1220
|
+
return `fb-${num}${suffix}.png`;
|
|
1221
|
+
}
|
|
1222
|
+
/**
|
|
1223
|
+
* Provide a severity fallback when upstream analysis is unavailable.
|
|
1224
|
+
*/
|
|
1225
|
+
defaultSeverityForCategory(category) {
|
|
1226
|
+
switch (category) {
|
|
1227
|
+
case "Bug":
|
|
1228
|
+
return "High";
|
|
1229
|
+
case "Performance":
|
|
1230
|
+
return "High";
|
|
1231
|
+
case "UX Issue":
|
|
1232
|
+
return "Medium";
|
|
1233
|
+
case "Suggestion":
|
|
1234
|
+
return "Low";
|
|
1235
|
+
case "Question":
|
|
1236
|
+
return "Low";
|
|
1237
|
+
default:
|
|
1238
|
+
return "Medium";
|
|
1239
|
+
}
|
|
1240
|
+
}
|
|
1241
|
+
countSeverities(items) {
|
|
1242
|
+
return items.reduce((acc, item) => {
|
|
1243
|
+
const severity = item.severity || this.defaultSeverityForCategory(item.category || "General");
|
|
1244
|
+
acc[severity] = (acc[severity] || 0) + 1;
|
|
1245
|
+
return acc;
|
|
1246
|
+
}, {});
|
|
1247
|
+
}
|
|
1248
|
+
extractTopThemes(items) {
|
|
1249
|
+
const counts = /* @__PURE__ */ new Map();
|
|
1250
|
+
items.forEach((item) => {
|
|
1251
|
+
(item.keywords || []).forEach((keyword) => {
|
|
1252
|
+
const normalized = keyword.toLowerCase();
|
|
1253
|
+
counts.set(normalized, (counts.get(normalized) || 0) + 1);
|
|
1254
|
+
});
|
|
1255
|
+
});
|
|
1256
|
+
return Array.from(counts.entries()).sort((a, b) => b[1] - a[1]).slice(0, 5).map(([keyword]) => keyword);
|
|
1257
|
+
}
|
|
1258
|
+
suggestAction(category, severity, transcription) {
|
|
1259
|
+
const excerpt = this.truncateText(transcription, 120);
|
|
1260
|
+
switch (category) {
|
|
1261
|
+
case "Bug":
|
|
1262
|
+
return `Reproduce and patch this defect, then add a regression test that validates: "${excerpt}".`;
|
|
1263
|
+
case "Performance":
|
|
1264
|
+
return `Profile this flow, target the slow step first, and validate before/after metrics for: "${excerpt}".`;
|
|
1265
|
+
case "UX Issue":
|
|
1266
|
+
return `Revise the UI interaction and run a quick usability check focused on: "${excerpt}".`;
|
|
1267
|
+
case "Suggestion":
|
|
1268
|
+
return severity === "High" || severity === "Critical" ? `Treat this suggestion as near-term roadmap work and define implementation scope for: "${excerpt}".` : `Track this as an enhancement request and prioritize against current sprint goals: "${excerpt}".`;
|
|
1269
|
+
case "Question":
|
|
1270
|
+
return `Answer this explicitly in product/docs so future reviews don't block on: "${excerpt}".`;
|
|
1271
|
+
default:
|
|
1272
|
+
return `Investigate this item and convert it into a concrete engineering task: "${excerpt}".`;
|
|
1273
|
+
}
|
|
1274
|
+
}
|
|
1275
|
+
/**
|
|
1276
|
+
* Count feedback items by type/category
|
|
1277
|
+
*/
|
|
1278
|
+
countTypes(items) {
|
|
1279
|
+
return items.reduce((acc, item) => {
|
|
1280
|
+
const type = item.category || "General";
|
|
1281
|
+
acc[type] = (acc[type] || 0) + 1;
|
|
1282
|
+
return acc;
|
|
1283
|
+
}, {});
|
|
1284
|
+
}
|
|
1285
|
+
/**
|
|
1286
|
+
* Count total screenshots across all items
|
|
1287
|
+
*/
|
|
1288
|
+
countScreenshots(items) {
|
|
1289
|
+
return items.reduce((sum, item) => sum + item.screenshots.length, 0);
|
|
1290
|
+
}
|
|
1291
|
+
};
|
|
1292
|
+
// Shared module-level singleton instance of MarkdownGeneratorImpl.
var markdownGenerator = new MarkdownGeneratorImpl();
|
|
1293
|
+
|
|
1294
|
+
// src/main/transcription/WhisperService.ts
|
|
1295
|
+
import { EventEmitter } from "events";
|
|
1296
|
+
import { basename as basename2, join as join4 } from "path";
|
|
1297
|
+
import { existsSync as existsSync2 } from "fs";
|
|
1298
|
+
import { readFile as readFile2, unlink, chmod } from "fs/promises";
|
|
1299
|
+
import { execFile as execFile2 } from "child_process";
|
|
1300
|
+
import { promisify as promisify2 } from "util";
|
|
1301
|
+
import { tmpdir } from "os";
|
|
1302
|
+
import { randomUUID } from "crypto";
|
|
1303
|
+
import * as os2 from "os";
|
|
1304
|
+
// Promisified child_process.execFile, used for ffmpeg probing/conversion.
var execFileAsync = promisify2(execFile2);
// Default WhisperService configuration; merged under caller overrides in the constructor.
var DEFAULT_CONFIG = {
  modelPath: "",
  // Set dynamically (resolved to getDefaultModelPath() when left empty)
  language: "en",
  threads: Math.max(1, Math.floor(os2.cpus().length / 2)),
  // Half CPU cores, but at least 1
  translateToEnglish: false
};
// Interval (ms) at which buffered live audio is flushed to Whisper.
var CHUNK_DURATION_MS = 3e3;
// Buffered-duration ceiling (ms) that triggers a forced flush in addAudio().
var MAX_BUFFER_DURATION_MS = 3e4;
// Buffered-byte ceiling that triggers a forced flush before accepting more audio.
var MAX_BUFFER_SIZE_BYTES = 500 * 1024;
// Whisper input is expected at 16 kHz mono throughout this module.
var SAMPLE_RATE = 16e3;
// File transcription is split into 30-second chunks to bound memory use.
var FILE_CHUNK_DURATION_SEC = 30;
var FILE_CHUNK_SAMPLES = FILE_CHUNK_DURATION_SEC * SAMPLE_RATE;
// Approximate free-memory requirement per model file; consulted by
// getRequiredMemoryBytes(), which falls back to the "small" entry for
// unknown model filenames.
var MODEL_MEMORY_REQUIREMENTS_BYTES = {
  "ggml-tiny.bin": 450 * 1024 * 1024,
  "ggml-base.bin": 800 * 1024 * 1024,
  "ggml-small.bin": 1400 * 1024 * 1024,
  "ggml-medium.bin": 2800 * 1024 * 1024,
  "ggml-large-v3.bin": 5200 * 1024 * 1024
};
|
|
1326
|
+
/**
 * Local speech-to-text service wrapping the `whisper-node` module.
 *
 * Two modes of operation:
 *  - Live streaming: start() installs an interval that flushes buffered
 *    Float32 audio (pushed via addAudio) through processBufferedAudio();
 *    results fan out to onTranscript callbacks and a "transcript" event.
 *  - Batch: transcribeSamples()/transcribeFile() transcribe a complete
 *    buffer or an on-disk audio file (non-WAV inputs converted via ffmpeg).
 *
 * Errors fan out to onError callbacks and an "error" event.
 */
var WhisperService = class extends EventEmitter {
  config;
  isInitialized = false;
  // Guards against overlapping whisper invocations from the flush interval.
  isProcessing = false;
  // Lazily imported `whisper-node` module; null until initialize() succeeds.
  whisperModule = null;
  // Audio buffering for batch processing (array of Float32Array chunks)
  audioBuffer = [];
  // Epoch ms when the current buffer window began; used to offset segment times.
  bufferStartTime = 0;
  totalBufferDuration = 0;
  totalBufferBytes = 0;
  // Processing state
  processingInterval = null;
  // Callbacks
  transcriptCallbacks = [];
  errorCallbacks = [];
  /**
   * @param config Optional partial config; merged over DEFAULT_CONFIG.
   *               An empty modelPath resolves to getDefaultModelPath().
   */
  constructor(config) {
    super();
    this.config = { ...DEFAULT_CONFIG, ...config };
    if (!this.config.modelPath) {
      this.config.modelPath = this.getDefaultModelPath();
    }
  }
  // ============================================================================
  // Public API
  // ============================================================================
  /**
   * Check if the configured Whisper model file exists on disk.
   */
  isModelAvailable() {
    return existsSync2(this.config.modelPath);
  }
  /**
   * Get the path where models should be stored.
   * Inside Electron this is <userData>/whisper-models; outside Electron
   * (the require throws and is caught) it falls back to ~/.markupr/whisper-models.
   */
  getModelsDirectory() {
    try {
      const { app } = __require("electron");
      return join4(app.getPath("userData"), "whisper-models");
    } catch {
      const homeDir = process.env.HOME || process.env.USERPROFILE || os2.homedir();
      return join4(homeDir, ".markupr", "whisper-models");
    }
  }
  /**
   * Get the default model path (whisper-medium).
   */
  getDefaultModelPath() {
    return join4(this.getModelsDirectory(), "ggml-medium.bin");
  }
  /**
   * Set the model path. Clears the initialized flag so the next
   * initialize() re-loads the new model.
   */
  setModelPath(modelPath) {
    this.config.modelPath = modelPath;
    this.isInitialized = false;
  }
  /**
   * Check if system has enough free memory for Whisper.
   * Requirement is model-aware (tiny/base/small/medium/large).
   */
  hasEnoughMemory() {
    const freeMemory = os2.freemem();
    const requiredMemory = this.getRequiredMemoryBytes();
    return freeMemory >= requiredMemory;
  }
  /**
   * Get current memory info: free vs. required MB and a sufficiency flag.
   */
  getMemoryInfo() {
    const freeMemory = os2.freemem();
    const requiredMemory = this.getRequiredMemoryBytes();
    return {
      freeMemoryMB: Math.round(freeMemory / 1024 / 1024),
      requiredMemoryMB: Math.round(requiredMemory / 1024 / 1024),
      sufficient: freeMemory >= requiredMemory
    };
  }
  /**
   * Initialize the Whisper model: verify the model file and free memory,
   * dynamically import whisper-node, and warm the model with a tiny
   * (0.1 s of silence) test transcription. Idempotent once successful.
   * Throws on missing model, insufficient memory, or load failure; load
   * failures are also fanned out to error callbacks before rethrowing.
   */
  async initialize() {
    if (this.isInitialized) {
      return;
    }
    if (!this.isModelAvailable()) {
      throw new Error(`Whisper model not found at ${this.config.modelPath}. Please download the model first.`);
    }
    if (!this.hasEnoughMemory()) {
      const memInfo = this.getMemoryInfo();
      throw new Error(
        `Insufficient memory for Whisper. Need ~${memInfo.requiredMemoryMB}MB free, only ${memInfo.freeMemoryMB}MB available.`
      );
    }
    this.log("Initializing Whisper model...");
    try {
      this.whisperModule = await import("whisper-node");
      if (!this.whisperModule || typeof this.whisperModule.whisper !== "function") {
        throw new Error("whisper-node module loaded but whisper function not found");
      }
      this.log("Pre-loading model with test transcription...");
      // 1600 samples = 0.1 s of silence at 16 kHz; forces model load up front.
      const testBuffer = new Float32Array(1600);
      await this.whisperModule.whisper(testBuffer, {
        modelPath: this.config.modelPath,
        language: this.config.language,
        threads: this.config.threads
      });
      this.isInitialized = true;
      this.log("Whisper model initialized successfully");
    } catch (error) {
      const initError = new Error(`Failed to initialize Whisper: ${error.message}`);
      this.errorCallbacks.forEach((cb) => cb(initError));
      throw initError;
    }
  }
  /**
   * Check if service is initialized and the module is loaded.
   */
  isReady() {
    return this.isInitialized && this.whisperModule !== null;
  }
  /**
   * Start accepting audio for transcription: reset buffer state and
   * install the periodic flush interval (every CHUNK_DURATION_MS).
   * Initializes lazily if needed.
   */
  async start() {
    if (!this.isInitialized) {
      await this.initialize();
    }
    this.audioBuffer = [];
    this.bufferStartTime = Date.now();
    this.totalBufferDuration = 0;
    this.totalBufferBytes = 0;
    this.processingInterval = setInterval(() => {
      // Fire-and-forget: errors surface through the error callbacks/event.
      this.processBufferedAudio();
    }, CHUNK_DURATION_MS);
    this.log("Whisper transcription started");
  }
  /**
   * Stop transcription: cancel the flush interval, process any remaining
   * buffered audio, then clear buffer state.
   * NOTE(review): if a flush is already in flight (isProcessing), the final
   * processBufferedAudio(true) returns immediately and the cleared audio is
   * dropped — confirm this tail-loss is acceptable.
   */
  async stop() {
    if (this.processingInterval) {
      clearInterval(this.processingInterval);
      this.processingInterval = null;
    }
    await this.processBufferedAudio(true);
    this.audioBuffer = [];
    this.totalBufferDuration = 0;
    this.totalBufferBytes = 0;
    this.log("Whisper transcription stopped");
  }
  /**
   * Add audio data to the buffer, force-flushing when byte or duration
   * ceilings are exceeded.
   * @param samples Float32Array of audio samples at 16kHz mono
   * @param durationMs Duration of this chunk in milliseconds
   * NOTE(review): both processBufferedAudio(true) calls are intentionally
   * un-awaited (this is a sync method); the buffer is detached synchronously
   * before the first await inside processBufferedAudio, so ordering holds —
   * but rejections are only reported via error callbacks.
   */
  addAudio(samples, durationMs) {
    const chunkBytes = samples.byteLength;
    if (this.totalBufferBytes + chunkBytes > MAX_BUFFER_SIZE_BYTES) {
      this.log("Audio buffer full, force-processing before adding new audio");
      this.processBufferedAudio(true);
    }
    this.audioBuffer.push(samples);
    this.totalBufferDuration += durationMs;
    this.totalBufferBytes += chunkBytes;
    if (this.totalBufferDuration >= MAX_BUFFER_DURATION_MS) {
      this.processBufferedAudio(true);
    }
  }
  /**
   * Register callback for transcript results.
   * @returns unsubscribe function
   */
  onTranscript(callback) {
    this.transcriptCallbacks.push(callback);
    return () => {
      this.transcriptCallbacks = this.transcriptCallbacks.filter((cb) => cb !== callback);
    };
  }
  /**
   * Register callback for errors.
   * @returns unsubscribe function
   */
  onError(callback) {
    this.errorCallbacks.push(callback);
    return () => {
      this.errorCallbacks = this.errorCallbacks.filter((cb) => cb !== callback);
    };
  }
  /**
   * Get a shallow copy of the current configuration.
   */
  getConfig() {
    return { ...this.config };
  }
  /**
   * Transcribe a complete Float32 buffer in one pass, racing the whisper
   * call against a 60 s timeout.
   * Useful for post-session retry workflows when live streaming failed.
   * @param samples 16 kHz mono Float32 audio
   * @param startTimeSec offset (s) added to every segment's start/end time
   * @returns segments with trimmed non-empty text and a fixed 0.9 confidence
   *          (whisper-node reports no confidence of its own)
   */
  async transcribeSamples(samples, startTimeSec) {
    if (!this.isInitialized) {
      await this.initialize();
    }
    if (!this.whisperModule) {
      throw new Error("Whisper module not loaded");
    }
    const CHUNK_TIMEOUT_MS = 6e4;
    let timeoutId;
    const timeoutPromise = new Promise((_, reject) => {
      timeoutId = setTimeout(
        () => reject(new Error("Whisper transcription timed out after 60s")),
        CHUNK_TIMEOUT_MS
      );
    });
    let result;
    try {
      result = await Promise.race([
        this.whisperModule.whisper(samples, {
          modelPath: this.config.modelPath,
          language: this.config.language,
          threads: this.config.threads,
          translate: this.config.translateToEnglish
        }),
        timeoutPromise
      ]);
    } finally {
      // Always cancel the timer so the process is not kept alive by it.
      if (timeoutId) clearTimeout(timeoutId);
    }
    if (!result || result.length === 0) {
      return [];
    }
    return result.map((segment) => ({
      text: segment.text.trim(),
      startTime: startTimeSec + segment.start,
      endTime: startTimeSec + segment.end,
      confidence: 0.9
    })).filter((segment) => segment.text.length > 0);
  }
  /**
   * Transcribe an audio file from disk.
   * Loads the file, converts to Float32Array at 16kHz mono, and transcribes
   * in FILE_CHUNK_DURATION_SEC-sized chunks to manage memory, yielding to
   * the event loop between chunks.
   *
   * @param audioPath - Path to the audio file (webm, wav, ogg, m4a)
   * @param onProgress - Optional progress callback (0-100)
   * @returns Array of transcript results with timestamps
   */
  async transcribeFile(audioPath, onProgress) {
    if (!this.isInitialized) {
      await this.initialize();
    }
    if (!existsSync2(audioPath)) {
      throw new Error(`Audio file not found: ${audioPath}`);
    }
    this.log(`Transcribing file: ${audioPath}`);
    onProgress?.(0);
    const samples = await this.loadAudioAsSamples(audioPath);
    if (samples.length === 0) {
      this.log("Audio file produced no samples");
      onProgress?.(100);
      return [];
    }
    const totalChunks = Math.ceil(samples.length / FILE_CHUNK_SAMPLES);
    const results = [];
    this.log(`Processing ${totalChunks} chunk(s) (${(samples.length / SAMPLE_RATE).toFixed(1)}s total)`);
    for (let i = 0; i < totalChunks; i++) {
      const chunkStart = i * FILE_CHUNK_SAMPLES;
      const chunkEnd = Math.min(chunkStart + FILE_CHUNK_SAMPLES, samples.length);
      // subarray shares the underlying buffer; no copy per chunk.
      const chunk = samples.subarray(chunkStart, chunkEnd);
      const startTimeSec = chunkStart / SAMPLE_RATE;
      const chunkResults = await this.transcribeSamples(chunk, startTimeSec);
      results.push(...chunkResults);
      const percent = Math.round((i + 1) / totalChunks * 100);
      onProgress?.(percent);
      if (i < totalChunks - 1) {
        // Yield to the event loop between chunks so the process stays responsive.
        await new Promise((resolve4) => setTimeout(resolve4, 0));
      }
    }
    this.log(`Transcription complete: ${results.length} segment(s)`);
    return results;
  }
  /**
   * Check if ffmpeg is available on the system (probes `ffmpeg -version`).
   */
  async isFfmpegAvailable() {
    try {
      await execFileAsync("ffmpeg", ["-version"]);
      return true;
    } catch {
      return false;
    }
  }
  // ============================================================================
  // Private Methods
  // ============================================================================
  /**
   * Load an audio file and return Float32Array samples at 16kHz mono.
   * WAV files are parsed directly; other formats are converted via ffmpeg.
   */
  async loadAudioAsSamples(audioPath) {
    const ext = audioPath.toLowerCase().split(".").pop() ?? "";
    if (ext === "wav") {
      return this.parseWavFile(audioPath);
    }
    return this.convertWithFfmpeg(audioPath);
  }
  /**
   * Parse a WAV file and extract PCM data as Float32Array at 16kHz mono.
   * Walks the RIFF chunk list (honoring the odd-size padding byte),
   * requires a fmt chunk before the data chunk, and handles PCM float32
   * and PCM int16 formats.
   * @throws on a missing RIFF/WAVE header, data-before-fmt ordering, or
   *         a file with no data chunk.
   */
  async parseWavFile(wavPath) {
    const buffer = await readFile2(wavPath);
    const riff = buffer.toString("ascii", 0, 4);
    const wave = buffer.toString("ascii", 8, 12);
    if (riff !== "RIFF" || wave !== "WAVE") {
      throw new Error(`Invalid WAV file: missing RIFF/WAVE header in ${wavPath}`);
    }
    let offset = 12;
    let audioFormat = 0;
    let numChannels = 0;
    let sampleRate = 0;
    let bitsPerSample = 0;
    let fmtFound = false;
    while (offset < buffer.length - 8) {
      const chunkId = buffer.toString("ascii", offset, offset + 4);
      const chunkSize = buffer.readUInt32LE(offset + 4);
      if (chunkId === "fmt ") {
        audioFormat = buffer.readUInt16LE(offset + 8);
        numChannels = buffer.readUInt16LE(offset + 10);
        sampleRate = buffer.readUInt32LE(offset + 12);
        bitsPerSample = buffer.readUInt16LE(offset + 22);
        fmtFound = true;
      }
      if (chunkId === "data") {
        if (!fmtFound) {
          throw new Error("WAV file has data chunk before fmt chunk");
        }
        const dataStart = offset + 8;
        const dataEnd = dataStart + chunkSize;
        // Clamp to the buffer in case the declared chunk size is truncated.
        const dataSlice = buffer.subarray(dataStart, Math.min(dataEnd, buffer.length));
        return this.extractWavSamples(dataSlice, audioFormat, numChannels, sampleRate, bitsPerSample);
      }
      offset += 8 + chunkSize;
      // RIFF chunks are word-aligned: odd-sized chunks carry a pad byte.
      if (chunkSize % 2 !== 0) {
        offset += 1;
      }
    }
    throw new Error(`Invalid WAV file: no data chunk found in ${wavPath}`);
  }
  /**
   * Extract samples from a WAV data chunk, converting to Float32Array at
   * 16kHz mono (downmix then resample as needed).
   * @throws for formats other than IEEE-float32 (format 3) or int16 PCM (format 1).
   */
  extractWavSamples(data, audioFormat, numChannels, sampleRate, bitsPerSample) {
    let monoFloat32;
    if (audioFormat === 3 && bitsPerSample === 32) {
      const totalSamples = Math.floor(data.length / 4);
      const allSamples = new Float32Array(totalSamples);
      for (let i = 0; i < totalSamples; i++) {
        allSamples[i] = data.readFloatLE(i * 4);
      }
      monoFloat32 = this.mixToMono(allSamples, numChannels);
    } else if (audioFormat === 1 && bitsPerSample === 16) {
      const totalSamples = Math.floor(data.length / 2);
      const allSamples = new Float32Array(totalSamples);
      for (let i = 0; i < totalSamples; i++) {
        // Normalize int16 into [-1, 1).
        allSamples[i] = data.readInt16LE(i * 2) / 32768;
      }
      monoFloat32 = this.mixToMono(allSamples, numChannels);
    } else {
      throw new Error(
        `Unsupported WAV format: audioFormat=${audioFormat}, bitsPerSample=${bitsPerSample}. Expected PCM float32 (format=3, bits=32) or PCM int16 (format=1, bits=16).`
      );
    }
    if (sampleRate !== SAMPLE_RATE) {
      return this.resample(monoFloat32, sampleRate, SAMPLE_RATE);
    }
    return monoFloat32;
  }
  /**
   * Mix interleaved multi-channel audio down to mono by averaging channels.
   */
  mixToMono(samples, numChannels) {
    if (numChannels === 1) {
      return samples;
    }
    const monoLength = Math.floor(samples.length / numChannels);
    const mono = new Float32Array(monoLength);
    for (let i = 0; i < monoLength; i++) {
      let sum = 0;
      for (let ch = 0; ch < numChannels; ch++) {
        sum += samples[i * numChannels + ch];
      }
      mono[i] = sum / numChannels;
    }
    return mono;
  }
  /**
   * Simple linear-interpolation resampling from one sample rate to another.
   */
  resample(samples, fromRate, toRate) {
    if (fromRate === toRate) {
      return samples;
    }
    const ratio = fromRate / toRate;
    const outputLength = Math.floor(samples.length / ratio);
    const output = new Float32Array(outputLength);
    for (let i = 0; i < outputLength; i++) {
      const srcIndex = i * ratio;
      const srcIndexFloor = Math.floor(srcIndex);
      const srcIndexCeil = Math.min(srcIndexFloor + 1, samples.length - 1);
      const frac = srcIndex - srcIndexFloor;
      output[i] = samples[srcIndexFloor] * (1 - frac) + samples[srcIndexCeil] * frac;
    }
    return output;
  }
  /**
   * Convert a non-WAV audio file to 16kHz mono Float32 WAV using ffmpeg,
   * then parse the resulting WAV. The temp file is chmod'd to 0600
   * (best-effort) and unlinked in all cases.
   * @throws with an install hint when ffmpeg is missing, or a wrapped
   *         error when conversion fails.
   */
  async convertWithFfmpeg(audioPath) {
    const ffmpegAvailable = await this.isFfmpegAvailable();
    if (!ffmpegAvailable) {
      const installHint = process.platform === "darwin" ? "brew install ffmpeg" : process.platform === "win32" ? "winget install ffmpeg or download from https://ffmpeg.org" : "apt install ffmpeg (Debian/Ubuntu) or dnf install ffmpeg (Fedora)";
      throw new Error(
        `ffmpeg is not available on this system. ffmpeg is required to transcribe non-WAV audio files (webm, ogg, m4a). Install ffmpeg via: ${installHint}.`
      );
    }
    const tempFileName = `markupr-transcode-${randomUUID()}.wav`;
    const tempPath = join4(tmpdir(), tempFileName);
    try {
      this.log(`Converting ${audioPath} to WAV via ffmpeg...`);
      await execFileAsync("ffmpeg", [
        "-i",
        audioPath,
        "-ar",
        String(SAMPLE_RATE),
        "-ac",
        "1",
        "-f",
        "wav",
        "-acodec",
        "pcm_f32le",
        "-y",
        tempPath
      ], {
        // Minimal environment: only what ffmpeg needs to resolve and run.
        env: { PATH: process.env.PATH, HOME: process.env.HOME, LANG: process.env.LANG, TMPDIR: process.env.TMPDIR }
      });
      // 384 === 0o600: owner read/write only; failure is non-fatal.
      await chmod(tempPath, 384).catch(() => {
      });
      this.log("ffmpeg conversion complete, parsing WAV...");
      return await this.parseWavFile(tempPath);
    } catch (error) {
      const msg = error instanceof Error ? error.message : String(error);
      throw new Error(`Failed to convert audio file with ffmpeg: ${msg}`);
    } finally {
      try {
        await unlink(tempPath);
      } catch {
      }
    }
  }
  /**
   * Process buffered audio through Whisper.
   * Skips silently when a run is already in flight, when the buffer is
   * empty, or (unless forced) when less than CHUNK_DURATION_MS of audio
   * is buffered. Detaches and resets the buffer synchronously before the
   * whisper call, then fans out each non-empty segment to callbacks and
   * the "transcript" event. Errors go to error callbacks / "error" event
   * rather than being rethrown.
   * @param force process even if the buffered duration is below the threshold
   */
  async processBufferedAudio(force = false) {
    if (this.isProcessing) {
      return;
    }
    if (!force && this.totalBufferDuration < CHUNK_DURATION_MS) {
      return;
    }
    if (this.audioBuffer.length === 0) {
      return;
    }
    if (!this.whisperModule) {
      this.logError("Cannot process: Whisper module not loaded");
      return;
    }
    this.isProcessing = true;
    // Wall-clock start of this buffer window; anchors segment timestamps.
    const processStartTime = this.bufferStartTime;
    try {
      const totalSamples = this.audioBuffer.reduce((sum, arr) => sum + arr.length, 0);
      const combinedAudio = new Float32Array(totalSamples);
      let offset = 0;
      for (const chunk of this.audioBuffer) {
        combinedAudio.set(chunk, offset);
        offset += chunk.length;
      }
      const processedDuration = this.totalBufferDuration;
      // Reset buffer state before awaiting so new audio accumulates cleanly.
      this.audioBuffer = [];
      this.totalBufferDuration = 0;
      this.totalBufferBytes = 0;
      this.bufferStartTime = Date.now();
      this.log(`Processing ${Math.round(processedDuration)}ms of audio...`);
      const result = await this.whisperModule.whisper(combinedAudio, {
        modelPath: this.config.modelPath,
        language: this.config.language,
        threads: this.config.threads,
        translate: this.config.translateToEnglish
      });
      if (result && result.length > 0) {
        for (const segment of result) {
          const transcriptResult = {
            text: segment.text.trim(),
            startTime: processStartTime / 1e3 + segment.start,
            endTime: processStartTime / 1e3 + segment.end,
            confidence: 0.9
            // Whisper doesn't provide confidence, use default
          };
          if (transcriptResult.text) {
            this.transcriptCallbacks.forEach((cb) => cb(transcriptResult));
            this.emit("transcript", transcriptResult);
            const preview = transcriptResult.text.length > 50 ? `${transcriptResult.text.substring(0, 50)}...` : transcriptResult.text;
            this.log(`Transcript: "${preview}"`);
          }
        }
      }
    } catch (error) {
      const transcriptionError = new Error(`Whisper transcription failed: ${error.message}`);
      this.errorCallbacks.forEach((cb) => cb(transcriptionError));
      this.emit("error", transcriptionError);
      this.logError("Transcription error", error);
    } finally {
      this.isProcessing = false;
    }
  }
  /**
   * Log helper: timestamped console.log with a service prefix.
   */
  log(message) {
    const timestamp = (/* @__PURE__ */ new Date()).toISOString();
    console.log(`[WhisperService ${timestamp}] ${message}`);
  }
  /**
   * Error log helper: timestamped console.error with a service prefix.
   */
  logError(message, error) {
    const timestamp = (/* @__PURE__ */ new Date()).toISOString();
    const errorStr = error instanceof Error ? error.message : String(error);
    console.error(`[WhisperService ${timestamp}] ERROR: ${message} - ${errorStr}`);
  }
  // Memory requirement keyed by model filename; unknown names fall back to "small".
  getRequiredMemoryBytes() {
    const modelName = basename2(this.config.modelPath);
    return MODEL_MEMORY_REQUIREMENTS_BYTES[modelName] ?? MODEL_MEMORY_REQUIREMENTS_BYTES["ggml-small.bin"];
  }
};
|
|
1870
|
+
// Shared singleton; constructed with no overrides, so DEFAULT_CONFIG applies
// and modelPath resolves to the default medium model location.
var whisperService = new WhisperService();
|
|
1871
|
+
|
|
1872
|
+
// src/main/output/templates/registry.ts
|
|
1873
|
+
/**
 * In-memory registry of output templates, keyed by template name.
 */
var TemplateRegistryImpl = class {
  // name -> template record
  templates = /* @__PURE__ */ new Map();
  /**
   * Register a template. Overwrites any existing template with the same name.
   */
  register(template) {
    this.templates.set(template.name, template);
  }
  /**
   * Look up a template by name; undefined when absent.
   */
  get(name) {
    return this.templates.get(name);
  }
  /**
   * True when a template with the given name has been registered.
   */
  has(name) {
    return this.templates.has(name);
  }
  /**
   * Names of all registered templates, in registration order.
   */
  list() {
    return [...this.templates.keys()];
  }
  /**
   * Summaries (name, description, fileExtension) for all registered templates.
   */
  listWithDescriptions() {
    const summaries = [];
    for (const { name, description, fileExtension } of this.templates.values()) {
      summaries.push({ name, description, fileExtension });
    }
    return summaries;
  }
  /**
   * Name of the default template.
   */
  getDefault() {
    return "markdown";
  }
};
|
|
1916
|
+
// Shared singleton registry instance.
var templateRegistry = new TemplateRegistryImpl();
|
|
1917
|
+
|
|
1918
|
+
// src/main/output/templates/helpers.ts
|
|
1919
|
+
import * as path3 from "path";
|
|
1920
|
+
/**
 * Render a second offset as "M:SS"; fractional seconds floor and
 * negative values clamp to 0:00.
 */
function formatTimestamp(seconds) {
  const wholeSeconds = Math.max(0, Math.floor(seconds));
  const secondPart = String(wholeSeconds % 60).padStart(2, "0");
  return `${Math.floor(wholeSeconds / 60)}:${secondPart}`;
}
|
|
1926
|
+
// Format a duration in milliseconds as "M:SS".
// FIX: clamp negative/invalid inputs to zero, matching the behavior of the
// sibling formatTimestamp helper (previously -5000 produced "-5:-5"-style output).
function formatDuration(ms) {
  const totalSeconds = Math.max(0, Math.floor(ms / 1e3));
  const mins = Math.floor(totalSeconds / 60);
  const secs = totalSeconds % 60;
  return `${mins}:${secs.toString().padStart(2, "0")}`;
}
|
|
1932
|
+
// Format a Date as e.g. "Jan 5, 2024 at 1:07 PM" (local time, 12-hour clock).
function formatDate(date) {
  const MONTH_ABBREVIATIONS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"];
  const hoursOfDay = date.getHours();
  // 0 and 12 both map to "12" on a 12-hour clock.
  const hour12 = hoursOfDay % 12 || 12;
  const meridiem = hoursOfDay < 12 ? "AM" : "PM";
  const paddedMinutes = String(date.getMinutes()).padStart(2, "0");
  const monthName = MONTH_ABBREVIATIONS[date.getMonth()];
  return `${monthName} ${date.getDate()}, ${date.getFullYear()} at ${hour12}:${paddedMinutes} ${meridiem}`;
}
|
|
1943
|
+
// Derive a short title from a transcript segment: its first sentence,
// truncated to at most 60 characters (with a "..." suffix when cut).
function generateSegmentTitle(text) {
  const [lead] = text.split(/[.!?]/);
  const candidate = lead.trim();
  return candidate.length <= 60 ? candidate : `${candidate.slice(0, 57)}...`;
}
|
|
1948
|
+
// Reflow a transcription for a Markdown blockquote: one sentence per line,
// each continuation line prefixed with "> ". Text with no sentence
// terminators (or only one sentence) is returned unchanged.
function wrapTranscription(transcription) {
  if (!transcription.includes(".") && !transcription.includes("!") && !transcription.includes("?")) {
    return transcription;
  }
  const sentences = transcription
    .split(/(?<=[.!?])\s+/)
    .map((sentence) => sentence.trim())
    .filter(Boolean);
  return sentences.length > 1 ? sentences.join("\n> ") : transcription;
}
|
|
1956
|
+
// Convert an absolute frame path into a path relative to the session
// directory; already-relative paths pass through untouched.
function computeRelativeFramePath(framePath, sessionDir) {
  return path3.isAbsolute(framePath) ? path3.relative(sessionDir, framePath) : framePath;
}
|
|
1962
|
+
// Total session duration as "M:SS": time from the first segment's start to
// the last segment's end. Empty input yields "0:00".
function computeSessionDuration(segments) {
  if (segments.length === 0) {
    return "0:00";
  }
  const first = segments[0];
  const last = segments[segments.length - 1];
  const elapsedMs = (last.endTime - first.startTime) * 1e3;
  return formatDuration(elapsedMs);
}
|
|
1968
|
+
/**
 * Assign each extracted frame to a transcript segment index.
 *
 * A frame whose timestamp falls inside a segment's [startTime, endTime]
 * window belongs to that segment; otherwise it is attached to the segment
 * whose startTime is nearest. Frame lists are sorted by timestamp.
 *
 * FIX: when there are no segments, return an empty map. Previously every
 * frame was keyed to index 0 — a segment that does not exist, so no caller
 * could ever read those entries.
 *
 * @param segments transcript segments with startTime/endTime (seconds)
 * @param frames   extracted frames with a timestamp (seconds)
 * @returns Map from segment index to its frames, sorted by timestamp
 */
function mapFramesToSegments(segments, frames) {
  const map = /* @__PURE__ */ new Map();
  if (segments.length === 0) {
    return map;
  }
  for (const frame of frames) {
    let bestIndex = 0;
    let bestDistance = Infinity;
    for (let i = 0; i < segments.length; i++) {
      const seg = segments[i];
      // Exact containment wins immediately.
      if (frame.timestamp >= seg.startTime && frame.timestamp <= seg.endTime) {
        bestIndex = i;
        bestDistance = 0;
        break;
      }
      const distance = Math.abs(frame.timestamp - seg.startTime);
      if (distance < bestDistance) {
        bestDistance = distance;
        bestIndex = i;
      }
    }
    const existing = map.get(bestIndex) || [];
    existing.push(frame);
    map.set(bestIndex, existing);
  }
  for (const [, frameList] of map) {
    frameList.sort((a, b) => a.timestamp - b.timestamp);
  }
  return map;
}
|
|
1995
|
+
|
|
1996
|
+
// src/main/output/templates/markdown.ts
|
|
1997
|
+
var REPORT_SUPPORT_LINE2 = "*If this report saved you time, support development: [Ko-fi](https://ko-fi.com/eddiesanjuan)*";
// Default Markdown template: header + per-segment transcript blockquotes
// with frame images interleaved.
var markdownTemplate = {
  name: "markdown",
  description: "Default Markdown format \u2014 AI-ready, llms.txt-inspired structured document",
  fileExtension: ".md",
  render(context) {
    const { result, sessionDir, timestamp } = context;
    const { transcriptSegments, extractedFrames } = result;
    const sessionTimestamp = formatDate(new Date(timestamp ?? Date.now()));
    const sessionDuration = computeSessionDuration(transcriptSegments);
    let md = `# markupr Session \u2014 ${sessionTimestamp}\n`;
    md += `> Segments: ${transcriptSegments.length} | Frames: ${extractedFrames.length} | Duration: ${sessionDuration}\n\n`;
    if (transcriptSegments.length === 0) {
      md += `_No speech was detected during this recording._\n`;
      return { content: md, fileExtension: ".md" };
    }
    md += `## Transcript\n\n`;
    const segmentFrameMap = mapFramesToSegments(transcriptSegments, extractedFrames);
    for (let i = 0; i < transcriptSegments.length; i++) {
      const segment = transcriptSegments[i];
      const formattedTime = formatTimestamp(segment.startTime);
      const title = generateSegmentTitle(segment.text);
      md += `### [${formattedTime}] ${title}\n`;
      md += `> ${wrapTranscription(segment.text)}\n\n`;
      const frames = segmentFrameMap.get(i);
      if (frames && frames.length > 0) {
        for (const frame of frames) {
          const frameTimestamp = formatTimestamp(frame.timestamp);
          const relativePath = computeRelativeFramePath(frame.path, sessionDir);
          // FIX: actually embed the frame image. frameTimestamp and
          // relativePath were computed but the appended literal was empty,
          // so screenshots never appeared in the report.
          md += `![Frame at ${frameTimestamp}](${relativePath})\n\n`;
        }
      }
    }
    md += `---\n*Generated by [markupr](https://markupr.com)*\n${REPORT_SUPPORT_LINE2}\n`;
    return { content: md, fileExtension: ".md" };
  }
};
|
|
2048
|
+
|
|
2049
|
+
// src/main/output/templates/json.ts
|
|
2050
|
+
// JSON template: machine-readable report with a summary block and one entry
// per transcript segment (frames nested under their segment).
var jsonTemplate = {
  name: "json",
  description: "Structured JSON output for programmatic consumption",
  fileExtension: ".json",
  render(context) {
    const { result, sessionDir, timestamp } = context;
    const { transcriptSegments, extractedFrames } = result;
    const frameMap = mapFramesToSegments(transcriptSegments, extractedFrames);
    // Frame entries carry session-relative paths so the JSON stays portable.
    const toFrameEntry = (f) => ({
      path: computeRelativeFramePath(f.path, sessionDir),
      timestamp: f.timestamp,
      reason: f.reason
    });
    const segmentEntries = transcriptSegments.map((segment, index) => ({
      text: segment.text,
      startTime: segment.startTime,
      endTime: segment.endTime,
      confidence: segment.confidence,
      frames: (frameMap.get(index) ?? []).map(toFrameEntry)
    }));
    const output = {
      version: "1.0",
      generator: "markupr",
      timestamp: new Date(timestamp ?? Date.now()).toISOString(),
      summary: {
        segments: transcriptSegments.length,
        frames: extractedFrames.length,
        duration: computeSessionDuration(transcriptSegments)
      },
      segments: segmentEntries
    };
    return {
      content: JSON.stringify(output, null, 2),
      fileExtension: ".json"
    };
  }
};
|
|
2088
|
+
|
|
2089
|
+
// src/main/output/templates/github-issue.ts
|
|
2090
|
+
// GitHub issue template: task-list action items followed by collapsible
// <details> blocks per segment, with frame screenshots inside.
var githubIssueTemplate = {
  name: "github-issue",
  description: "GitHub-flavored Markdown optimized for issue bodies with task lists and collapsible details",
  fileExtension: ".md",
  render(context) {
    const { result, sessionDir, timestamp } = context;
    const { transcriptSegments, extractedFrames } = result;
    const sessionTimestamp = formatDate(new Date(timestamp ?? Date.now()));
    const duration = computeSessionDuration(transcriptSegments);
    let md = `## Feedback Report\n\n`;
    md += `> Captured by [markupr](https://markupr.com) on ${sessionTimestamp}\n`;
    md += `> ${transcriptSegments.length} segments | ${extractedFrames.length} frames | Duration: ${duration}\n\n`;
    if (transcriptSegments.length === 0) {
      md += `_No feedback was captured during this recording._\n`;
      return { content: md, fileExtension: ".md" };
    }
    md += `### Action Items\n\n`;
    for (const segment of transcriptSegments) {
      const title = generateSegmentTitle(segment.text);
      md += `- [ ] ${title}\n`;
    }
    md += `\n`;
    md += `### Details\n\n`;
    const segmentFrameMap = mapFramesToSegments(transcriptSegments, extractedFrames);
    for (let i = 0; i < transcriptSegments.length; i++) {
      const segment = transcriptSegments[i];
      const formattedTime = formatTimestamp(segment.startTime);
      const title = generateSegmentTitle(segment.text);
      md += `<details>\n`;
      md += `<summary><strong>[${formattedTime}] ${title}</strong></summary>\n\n`;
      md += `${segment.text}\n\n`;
      const frames = segmentFrameMap.get(i);
      if (frames && frames.length > 0) {
        for (const frame of frames) {
          const relativePath = computeRelativeFramePath(frame.path, sessionDir);
          // FIX: embed the screenshot. relativePath was computed but the
          // appended literal was empty, so images never reached the issue
          // body. Alt text mirrors the markdown template's convention.
          md += `![Frame at ${formatTimestamp(frame.timestamp)}](${relativePath})\n\n`;
        }
      }
      md += `</details>\n\n`;
    }
    md += `---\n_Generated by [markupr](https://markupr.com)_\n`;
    return { content: md, fileExtension: ".md" };
  }
};
|
|
2157
|
+
|
|
2158
|
+
// src/main/output/templates/linear.ts
|
|
2159
|
+
// Linear template: compact bold headings, a task-list summary, then one
// section per segment with blockquoted text and frame images.
var linearTemplate = {
  name: "linear",
  description: "Linear-compatible Markdown for issue descriptions",
  fileExtension: ".md",
  render(context) {
    const { result, sessionDir, timestamp } = context;
    const { transcriptSegments, extractedFrames } = result;
    const sessionTimestamp = formatDate(new Date(timestamp ?? Date.now()));
    const duration = computeSessionDuration(transcriptSegments);
    let md = `**Feedback Report** \u2014 ${sessionTimestamp}\n`;
    md += `${transcriptSegments.length} segments | ${extractedFrames.length} frames | Duration: ${duration}\n\n`;
    if (transcriptSegments.length === 0) {
      md += `_No feedback was captured during this recording._\n`;
      return { content: md, fileExtension: ".md" };
    }
    md += `**Action Items**\n\n`;
    for (const segment of transcriptSegments) {
      const title = generateSegmentTitle(segment.text);
      md += `- [ ] ${title}\n`;
    }
    md += `\n---\n\n`;
    const segmentFrameMap = mapFramesToSegments(transcriptSegments, extractedFrames);
    for (let i = 0; i < transcriptSegments.length; i++) {
      const segment = transcriptSegments[i];
      const formattedTime = formatTimestamp(segment.startTime);
      const title = generateSegmentTitle(segment.text);
      md += `### [${formattedTime}] ${title}\n\n`;
      md += `> ${segment.text}\n\n`;
      const frames = segmentFrameMap.get(i);
      if (frames && frames.length > 0) {
        for (const frame of frames) {
          const relativePath = computeRelativeFramePath(frame.path, sessionDir);
          // FIX: embed the frame image. relativePath was computed but the
          // appended literal was empty, so screenshots were silently dropped.
          // Alt text mirrors the markdown template's convention.
          md += `![Frame at ${formatTimestamp(frame.timestamp)}](${relativePath})\n\n`;
        }
      }
    }
    md += `---\n_Captured by [markupr](https://markupr.com)_\n`;
    return { content: md, fileExtension: ".md" };
  }
};
|
|
2217
|
+
|
|
2218
|
+
// src/main/output/templates/jira.ts
|
|
2219
|
+
// Jira template: wiki-markup report — info panel, summary table, and
// per-segment detail sections with {quote} blocks and thumbnail images.
var jiraTemplate = {
  name: "jira",
  description: "Jira wiki markup with panels, tables, and {code} blocks",
  fileExtension: ".jira",
  render(context) {
    const { result, sessionDir, timestamp } = context;
    const { transcriptSegments, extractedFrames } = result;
    const sessionTimestamp = formatDate(new Date(timestamp ?? Date.now()));
    const duration = computeSessionDuration(transcriptSegments);
    // Accumulate markup fragments and join once at the end.
    const parts = [];
    parts.push(`h1. Feedback Report\n\n`);
    parts.push(`{panel:title=Session Info|borderStyle=solid|borderColor=#ccc}\n`);
    parts.push(`*Captured:* ${sessionTimestamp}\n`);
    parts.push(`*Segments:* ${transcriptSegments.length} | *Frames:* ${extractedFrames.length} | *Duration:* ${duration}\n`);
    parts.push(`{panel}\n\n`);
    if (transcriptSegments.length === 0) {
      parts.push(`_No feedback was captured during this recording._\n`);
      return { content: parts.join(""), fileExtension: ".jira" };
    }
    parts.push(`h2. Summary\n\n`);
    parts.push(`||#||Timestamp||Feedback||\n`);
    transcriptSegments.forEach((segment, i) => {
      parts.push(`|${i + 1}|${formatTimestamp(segment.startTime)}|${generateSegmentTitle(segment.text)}|\n`);
    });
    parts.push(`\n`);
    parts.push(`h2. Details\n\n`);
    const frameMap = mapFramesToSegments(transcriptSegments, extractedFrames);
    transcriptSegments.forEach((segment, i) => {
      const formattedTime = formatTimestamp(segment.startTime);
      const title = generateSegmentTitle(segment.text);
      // Brackets are escaped so Jira does not treat them as link markup.
      parts.push(`h3. \\[${formattedTime}\\] ${title}\n\n`);
      parts.push(`{quote}\n${segment.text}\n{quote}\n\n`);
      for (const frame of frameMap.get(i) ?? []) {
        const relativePath = computeRelativeFramePath(frame.path, sessionDir);
        parts.push(`!${relativePath}|thumbnail!\n\n`);
      }
    });
    parts.push(`----\n_Generated by [markupr|https://markupr.com]_\n`);
    return { content: parts.join(""), fileExtension: ".jira" };
  }
};
|
|
2291
|
+
|
|
2292
|
+
// src/main/output/templates/index.ts
|
|
2293
|
+
// Register the built-in output templates with the shared registry.
// "markdown" is also the registry's default (see TemplateRegistryImpl.getDefault()).
templateRegistry.register(markdownTemplate);
templateRegistry.register(jsonTemplate);
templateRegistry.register(githubIssueTemplate);
templateRegistry.register(linearTemplate);
templateRegistry.register(jiraTemplate);
|
|
2298
|
+
|
|
2299
|
+
// src/cli/CLIPipeline.ts
|
|
2300
|
+
var CLIPipeline = class _CLIPipeline {
  // Pipeline configuration: videoPath, outputDir, and the optional
  // audioPath / skipFrames / template / whisperModelPath / verbose flags
  // read throughout this class.
  options;
  // Line-oriented logger callback supplied by the caller.
  log;
  // Coarse stage-progress callback; defaults to a no-op when omitted.
  progress;
  // Temp files created during the run; removed by cleanup().
  tempFiles = [];
  // Child processes currently in flight; killed by abort().
  activeProcesses = /* @__PURE__ */ new Set();
  constructor(options, log2, progress) {
    this.options = options;
    this.log = log2;
    this.progress = progress ?? (() => {
    });
  }
  /**
   * Run the full pipeline: audio extraction -> transcription -> analysis ->
   * frame extraction -> markdown generation.
   * Temp files are always cleaned up, even when the pipeline throws.
   */
  async run() {
    try {
      return await this.runPipeline();
    } finally {
      await this.cleanup();
    }
  }
  // Core pipeline stages. Expected failures are raised as CLIPipelineError
  // with severity "user" (caller's fault) or "system" (environment fault).
  async runPipeline() {
    const startTime = Date.now();
    await this.validateVideoFile();
    // ffmpeg is only needed when we must extract audio or frames ourselves
    // (i.e. unless both a separate audio file is given AND frames are skipped).
    if (!(this.options.audioPath && this.options.skipFrames)) {
      await this.checkFfmpegAvailable();
    }
    try {
      if (!existsSync3(this.options.outputDir)) {
        mkdirSync2(this.options.outputDir, { recursive: true });
      }
    } catch (error) {
      const code = error.code;
      if (code === "EACCES") {
        throw new CLIPipelineError(
          `Permission denied: cannot create output directory: ${this.options.outputDir}`,
          "user"
        );
      }
      throw new CLIPipelineError(
        `Cannot create output directory: ${this.options.outputDir} (${code})`,
        "system"
      );
    }
    this.progress("Extracting audio...");
    const audioPath = await this.resolveAudioPath();
    this.progress("Transcribing (this may take a while)...");
    const segments = await this.transcribe(audioPath);
    const analyzer = new TranscriptAnalyzer();
    const keyMoments = analyzer.analyze(segments);
    this.log(` Found ${keyMoments.length} key moment(s)`);
    let extractedFrames = [];
    if (!this.options.skipFrames) {
      this.progress("Extracting frames...");
      extractedFrames = await this.extractFrames(keyMoments, segments);
    } else {
      this.log(" Frame extraction skipped (--no-frames)");
    }
    this.progress("Generating report...");
    const result = {
      transcriptSegments: segments,
      extractedFrames,
      reportPath: this.options.outputDir
    };
    let reportContent;
    let reportExtension = ".md";
    const templateName = this.options.template;
    // Non-default templates come from the registry; "markdown" (or no
    // template at all) uses the legacy MarkdownGeneratorImpl path below.
    if (templateName && templateName !== "markdown") {
      const template = templateRegistry.get(templateName);
      if (!template) {
        const available = templateRegistry.list().join(", ");
        throw new CLIPipelineError(
          `Unknown template "${templateName}". Available: ${available}`,
          "user"
        );
      }
      const output = template.render({ result, sessionDir: this.options.outputDir });
      reportContent = output.content;
      reportExtension = output.fileExtension;
    } else {
      const generator = new MarkdownGeneratorImpl();
      reportContent = generator.generateFromPostProcess(result, this.options.outputDir);
    }
    const outputFilename = this.generateOutputFilename(reportExtension);
    const outputPath = join5(this.options.outputDir, outputFilename);
    try {
      await writeFile2(outputPath, reportContent, "utf-8");
    } catch (error) {
      const code = error.code;
      throw new CLIPipelineError(
        `Failed to write output file: ${outputPath}
Reason: ${code === "ENOSPC" ? "Disk is full" : error.message}`,
        "system"
      );
    }
    const durationSeconds = (Date.now() - startTime) / 1e3;
    return {
      outputPath,
      transcriptSegments: segments.length,
      extractedFrames: extractedFrames.length,
      durationSeconds
    };
  }
  /**
   * Abort the pipeline: kill active child processes and clean up temp files.
   */
  async abort() {
    for (const proc of this.activeProcesses) {
      proc.kill("SIGTERM");
    }
    this.activeProcesses.clear();
    await this.cleanup();
  }
  /**
   * Clean up temp files created during the pipeline run.
   * Unlink errors are deliberately ignored (the file may already be gone).
   */
  async cleanup() {
    for (const file of this.tempFiles) {
      try {
        await unlink2(file);
      } catch {
      }
    }
    this.tempFiles = [];
  }
  // ==========================================================================
  // Private Methods
  // ==========================================================================
  // Minimal allow-listed environment for child processes, so ffmpeg/ffprobe
  // do not inherit the full parent environment.
  static SAFE_CHILD_ENV = {
    PATH: process.env.PATH,
    HOME: process.env.HOME || process.env.USERPROFILE,
    USERPROFILE: process.env.USERPROFILE,
    LANG: process.env.LANG,
    TMPDIR: process.env.TMPDIR || process.env.TEMP,
    TEMP: process.env.TEMP
  };
  /**
   * Execute a child process while tracking it for cleanup on abort.
   * Resolves with the child's stdout/stderr as strings; rejects on failure.
   */
  execFileTracked(command, args) {
    return new Promise((resolve4, reject) => {
      const child = execFileCb4(command, args, { env: _CLIPipeline.SAFE_CHILD_ENV }, (error, stdout, stderr) => {
        this.activeProcesses.delete(child);
        if (error) reject(error);
        else resolve4({ stdout: stdout?.toString() ?? "", stderr: stderr?.toString() ?? "" });
      });
      this.activeProcesses.add(child);
    });
  }
  /**
   * Validate the video file is a real, non-empty file with a video stream.
   */
  async validateVideoFile() {
    const { videoPath } = this.options;
    let stats;
    try {
      stats = await stat3(videoPath);
    } catch {
      throw new CLIPipelineError(`Video file not found: ${videoPath}`, "user");
    }
    if (!stats.isFile()) {
      throw new CLIPipelineError(`Not a regular file: ${videoPath}`, "user");
    }
    if (stats.size === 0) {
      throw new CLIPipelineError(`Video file is empty (0 bytes): ${videoPath}`, "user");
    }
    try {
      // ffprobe emits "video" (one line per stream) when a video stream exists.
      const { stdout } = await this.execFileTracked("ffprobe", [
        "-v",
        "error",
        "-select_streams",
        "v",
        "-show_entries",
        "stream=codec_type",
        "-of",
        "csv=p=0",
        videoPath
      ]);
      if (!stdout.trim().includes("video")) {
        throw new CLIPipelineError(
          `No video stream found in file: ${videoPath}`,
          "user"
        );
      }
    } catch (error) {
      // Re-throw our own error; anything else means ffprobe itself failed.
      if (error instanceof CLIPipelineError) throw error;
      throw new CLIPipelineError(
        `Cannot read video file (is ffprobe installed?): ${videoPath}`,
        "system"
      );
    }
  }
  /**
   * Check that ffmpeg is available on PATH.
   */
  async checkFfmpegAvailable() {
    try {
      await this.execFileTracked("ffmpeg", ["-version"]);
    } catch {
      const platform = process.platform;
      const installHint = platform === "darwin" ? "brew install ffmpeg" : platform === "win32" ? "winget install ffmpeg (or download from https://ffmpeg.org)" : "apt install ffmpeg (or your package manager)";
      throw new CLIPipelineError(
        `ffmpeg is required but not found on your system.
Install via: ${installHint}
Or provide a separate audio file with --audio <file> and --no-frames`,
        "system"
      );
    }
  }
  /**
   * Resolve the audio path. If no separate audio file was provided, probe
   * the video for an audio track and extract it to a temp WAV file.
   * Returns null when no audio is available or extraction fails
   * (transcription is then skipped downstream).
   */
  async resolveAudioPath() {
    if (this.options.audioPath) {
      if (!existsSync3(this.options.audioPath)) {
        throw new CLIPipelineError(
          `Audio file not found: ${this.options.audioPath}`,
          "user"
        );
      }
      this.log(` Using provided audio: ${this.options.audioPath}`);
      return this.options.audioPath;
    }
    const hasAudio = await this.videoHasAudioTrack(this.options.videoPath);
    if (!hasAudio) {
      this.log(" No audio track found in video - transcription will be skipped");
      return null;
    }
    this.log(" Extracting audio from video...");
    const tempAudioPath = join5(tmpdir2(), `markupr-cli-audio-${randomUUID2()}.wav`);
    this.tempFiles.push(tempAudioPath);
    try {
      // 16 kHz mono float32 WAV — presumably the input format WhisperService
      // expects; confirm against WhisperService's documentation.
      await this.execFileTracked("ffmpeg", [
        "-i",
        this.options.videoPath,
        "-vn",
        "-ar",
        "16000",
        "-ac",
        "1",
        "-f",
        "wav",
        "-acodec",
        "pcm_f32le",
        "-y",
        tempAudioPath
      ]);
      // 384 === 0o600 (owner read/write only); a chmod failure is non-fatal.
      await chmod2(tempAudioPath, 384).catch(() => {
      });
      this.log(" Audio extraction complete");
      return tempAudioPath;
    } catch (error) {
      // Best-effort: a failed extraction degrades to a transcript-less run.
      const msg = error instanceof Error ? error.message : String(error);
      this.log(` WARNING: Audio extraction failed: ${msg}`);
      return null;
    }
  }
  /**
   * Use ffprobe to check whether the video file contains an audio stream.
   * Any probe failure is treated as "no audio".
   */
  async videoHasAudioTrack(videoPath) {
    try {
      const { stdout } = await this.execFileTracked("ffprobe", [
        "-v",
        "error",
        "-select_streams",
        "a",
        "-show_entries",
        "stream=codec_type",
        "-of",
        "csv=p=0",
        videoPath
      ]);
      return stdout.trim().length > 0;
    } catch {
      return false;
    }
  }
  /**
   * Transcribe audio using WhisperService. Falls back gracefully if the
   * model is not available. Returns an array of {text, startTime, endTime,
   * confidence} segments (empty on any failure).
   */
  async transcribe(audioPath) {
    if (!audioPath) {
      this.log(" No audio available - skipping transcription");
      return [];
    }
    const whisper = new WhisperService(
      this.options.whisperModelPath ? { modelPath: this.options.whisperModelPath } : void 0
    );
    if (!whisper.isModelAvailable()) {
      const modelsDir = whisper.getModelsDirectory();
      this.log(` Whisper model not found at: ${whisper.getConfig().modelPath}`);
      this.log(` Models directory: ${modelsDir}`);
      this.log(" Transcription will be skipped. Download a model to enable transcription.");
      return [];
    }
    this.log(` Transcribing with Whisper (model: ${basename3(whisper.getConfig().modelPath)})...`);
    try {
      const results = await whisper.transcribeFile(audioPath, (percent) => {
        if (this.options.verbose) {
          // \r keeps the progress readout on a single console line.
          process.stdout.write(`\r Transcription progress: ${percent}%`);
        }
      });
      if (this.options.verbose && results.length > 0) {
        process.stdout.write("\n");
      }
      const segments = results.map((r) => ({
        text: r.text,
        startTime: r.startTime,
        endTime: r.endTime,
        confidence: r.confidence
      }));
      this.log(` Transcription complete: ${segments.length} segment(s)`);
      return segments;
    } catch (error) {
      // Best-effort: a failed transcription degrades to an empty transcript.
      const msg = error instanceof Error ? error.message : String(error);
      this.log(` WARNING: Transcription failed: ${msg}`);
      return [];
    }
  }
  /**
   * Extract video frames at key moment timestamps.
   */
  async extractFrames(keyMoments, segments) {
    if (keyMoments.length === 0) {
      this.log(" No key moments found - skipping frame extraction");
      return [];
    }
    const extractor = new FrameExtractor();
    const available = await extractor.checkFfmpeg();
    if (!available) {
      this.log(" WARNING: ffmpeg not found - frame extraction skipped");
      this.log(" Install ffmpeg: brew install ffmpeg (macOS) or apt install ffmpeg (Linux)");
      return [];
    }
    this.log(` Extracting ${keyMoments.length} frame(s)...`);
    const timestamps = keyMoments.map((m) => m.timestamp);
    const extractionResult = await extractor.extract({
      videoPath: this.options.videoPath,
      timestamps,
      outputDir: this.options.outputDir
    });
    // Pair each successfully extracted frame with its originating key moment
    // (matched within 0.5 s) and the nearest transcript segment.
    const extractedFrames = extractionResult.frames.filter((f) => f.success).map((frame) => {
      const moment = keyMoments.find(
        (m) => Math.abs(m.timestamp - frame.timestamp) < 0.5
      );
      const closestSegment = this.findClosestSegment(frame.timestamp, segments);
      return {
        path: frame.path,
        timestamp: frame.timestamp,
        reason: moment?.reason ?? "Extracted frame",
        transcriptSegment: closestSegment
      };
    });
    this.log(` Extracted ${extractedFrames.length} frame(s)`);
    return extractedFrames;
  }
  /**
   * Find the transcript segment closest to a given timestamp.
   * Prefers a segment that contains the timestamp; otherwise the segment
   * whose startTime is nearest. Returns undefined when there are no segments.
   */
  findClosestSegment(timestamp, segments) {
    if (segments.length === 0) return void 0;
    for (const segment of segments) {
      if (timestamp >= segment.startTime && timestamp <= segment.endTime) {
        return segment;
      }
    }
    let closest = segments[0];
    let minDistance = Math.abs(timestamp - closest.startTime);
    for (let i = 1; i < segments.length; i++) {
      const distance = Math.abs(timestamp - segments[i].startTime);
      if (distance < minDistance) {
        minDistance = distance;
        closest = segments[i];
      }
    }
    return closest;
  }
  /**
   * Generate the output filename based on the video filename and current date (UTC).
   */
  generateOutputFilename(extension = ".md") {
    // Sanitize the video basename: strip the extension, replace filesystem-
    // unsafe characters, and collapse runs of dashes.
    const videoName = basename3(this.options.videoPath).replace(/\.[^.]+$/, "").replace(/[^a-zA-Z0-9_-]/g, "-").replace(/-+/g, "-");
    const now = /* @__PURE__ */ new Date();
    const dateStr = [
      now.getUTCFullYear(),
      String(now.getUTCMonth() + 1).padStart(2, "0"),
      String(now.getUTCDate()).padStart(2, "0")
    ].join("");
    const timeStr = [
      String(now.getUTCHours()).padStart(2, "0"),
      String(now.getUTCMinutes()).padStart(2, "0"),
      String(now.getUTCSeconds()).padStart(2, "0")
    ].join("");
    const ext = extension.startsWith(".") ? extension : `.${extension}`;
    return `${videoName}-feedback-${dateStr}-${timeStr}${ext}`;
  }
};
|
|
2702
|
+
/**
 * Error thrown by the CLI pipeline, tagged with a severity string
 * supplied by the thrower.
 */
var CLIPipelineError = class extends Error {
  // Severity tag carried alongside the message.
  severity;
  constructor(message, severity) {
    super(message);
    this.severity = severity;
    this.name = "CLIPipelineError";
  }
};
|
|
2710
|
+
|
|
2711
|
+
// src/mcp/tools/captureWithVoice.ts
|
|
2712
|
+
/**
 * Register the `capture_with_voice` MCP tool.
 *
 * Records screen and voice for a fixed `duration` into a new session
 * directory, runs the CLIPipeline over the capture, marks the session
 * complete, and returns a text summary ending in an "OUTPUT:<path>"
 * marker line with the report location.
 */
function register2(server) {
  server.tool(
    "capture_with_voice",
    "Record screen and voice for a specified duration, then run the full markupr pipeline to produce a structured feedback report.",
    {
      duration: z2.number().min(3).max(300).describe("Recording duration in seconds (3-300)"),
      outputDir: z2.string().optional().describe("Output directory (default: session directory)"),
      skipFrames: z2.boolean().optional().default(false).describe("Skip frame extraction"),
      template: z2.string().optional().describe(
        `Output template (default: markdown). Options: ${templateRegistry.list().join(", ")}`
      )
    },
    async ({ duration, outputDir, skipFrames, template }) => {
      try {
        // One fresh session per invocation; the recording lands inside it.
        const session = await sessionStore.create();
        const sessionDir = sessionStore.getSessionDir(session.id);
        const videoPath = join6(sessionDir, "recording.mp4");
        log(`Starting capture_with_voice: duration=${duration}s`);
        // Blocks until the fixed-duration recording completes.
        await record({ duration, outputPath: videoPath });
        // Reports go to the session directory unless the caller overrides.
        const pipelineOutputDir = outputDir ?? sessionDir;
        const pipeline = new CLIPipeline(
          {
            videoPath,
            outputDir: pipelineOutputDir,
            skipFrames,
            template,
            verbose: false
          },
          (msg) => log(msg)
        );
        const result = await pipeline.run();
        // Only mark complete after the pipeline succeeds; any throw above
        // is surfaced through the catch below instead.
        await sessionStore.update(session.id, {
          status: "complete",
          endTime: Date.now(),
          videoPath,
          reportPath: result.outputPath
        });
        return {
          content: [
            {
              type: "text",
              text: [
                `Recording complete: ${duration} seconds captured`,
                "Pipeline results:",
                ` Transcript segments: ${result.transcriptSegments}`,
                ` Extracted frames: ${result.extractedFrames}`,
                ` Processing time: ${result.durationSeconds.toFixed(1)}s`,
                "",
                `Report: ${result.outputPath}`,
                `OUTPUT:${result.outputPath}`
              ].join("\n")
            }
          ]
        };
      } catch (error) {
        // Per MCP tool convention in this file, failures are returned as
        // { isError: true } results rather than thrown.
        return {
          content: [{ type: "text", text: `Error: ${error.message}` }],
          isError: true
        };
      }
    }
  );
}
|
|
2775
|
+
|
|
2776
|
+
// src/mcp/tools/analyzeVideo.ts
|
|
2777
|
+
import { z as z3 } from "zod";
|
|
2778
|
+
import { stat as stat4 } from "fs/promises";
|
|
2779
|
+
/**
 * Register the `analyze_video` MCP tool.
 *
 * Validates that the video (and optional separate audio file) exists,
 * is a regular file, and is non-empty before running the CLIPipeline.
 * Output goes to `outputDir` (default: a fresh session directory) and
 * the session record is updated on success. All failures are returned
 * as { isError: true } tool results rather than thrown.
 */
function register3(server) {
  server.tool(
    "analyze_video",
    "Process an existing video file through the markupr pipeline. Generates a structured markdown report with transcript, key moments, and extracted frames.",
    {
      videoPath: z3.string().describe("Absolute path to the video file"),
      audioPath: z3.string().optional().describe("Separate audio file path (if not embedded)"),
      outputDir: z3.string().optional().describe("Output directory (default: session directory)"),
      skipFrames: z3.boolean().optional().default(false).describe("Skip frame extraction"),
      template: z3.string().optional().describe(
        `Output template (default: markdown). Options: ${templateRegistry.list().join(", ")}`
      )
    },
    async ({ videoPath, audioPath, outputDir, skipFrames, template }) => {
      try {
        // stat() doubles as the existence check; a throw means not found.
        let fileStats;
        try {
          fileStats = await stat4(videoPath);
        } catch {
          return {
            content: [{ type: "text", text: `Error: Video file not found: ${videoPath}` }],
            isError: true
          };
        }
        if (!fileStats.isFile() || fileStats.size === 0) {
          return {
            content: [{ type: "text", text: `Error: Video file is empty or not a regular file: ${videoPath}` }],
            isError: true
          };
        }
        // Same validation for the optional standalone audio track.
        if (audioPath) {
          try {
            const audioStats = await stat4(audioPath);
            if (!audioStats.isFile() || audioStats.size === 0) {
              return {
                content: [{ type: "text", text: `Error: Audio file is empty or not a regular file: ${audioPath}` }],
                isError: true
              };
            }
          } catch {
            return {
              content: [{ type: "text", text: `Error: Audio file not found: ${audioPath}` }],
              isError: true
            };
          }
        }
        const session = await sessionStore.create();
        const sessionDir = sessionStore.getSessionDir(session.id);
        const pipelineOutputDir = outputDir ?? sessionDir;
        log(`Analyzing video: ${videoPath}`);
        const pipeline = new CLIPipeline(
          {
            videoPath,
            audioPath,
            outputDir: pipelineOutputDir,
            skipFrames,
            template,
            verbose: false
          },
          (msg) => log(msg)
        );
        const result = await pipeline.run();
        // Session is only marked complete after a successful run.
        await sessionStore.update(session.id, {
          status: "complete",
          endTime: Date.now(),
          videoPath,
          reportPath: result.outputPath
        });
        return {
          content: [
            {
              type: "text",
              text: [
                `Video analysis complete`,
                "Pipeline results:",
                ` Transcript segments: ${result.transcriptSegments}`,
                ` Extracted frames: ${result.extractedFrames}`,
                ` Processing time: ${result.durationSeconds.toFixed(1)}s`,
                "",
                `Report: ${result.outputPath}`,
                `OUTPUT:${result.outputPath}`
              ].join("\n")
            }
          ]
        };
      } catch (error) {
        return {
          content: [{ type: "text", text: `Error: ${error.message}` }],
          isError: true
        };
      }
    }
  );
}
|
|
2873
|
+
|
|
2874
|
+
// src/mcp/tools/analyzeScreenshot.ts
|
|
2875
|
+
import { z as z4 } from "zod";
|
|
2876
|
+
import { join as join7 } from "path";
|
|
2877
|
+
import { readFile as readFile3, unlink as unlink3 } from "fs/promises";
|
|
2878
|
+
import { tmpdir as tmpdir3 } from "os";
|
|
2879
|
+
import { randomUUID as randomUUID3 } from "crypto";
|
|
2880
|
+
/**
 * Register the `analyze_screenshot` MCP tool.
 *
 * Captures one display into a uniquely-named temp PNG, optimizes it in
 * place, and returns the image inline (base64) plus a short text
 * description so the client model can inspect it visually. The temp
 * file is always removed in the finally block, success or failure.
 */
function register4(server) {
  server.tool(
    "analyze_screenshot",
    "Take a screenshot and return it as an image for the AI to analyze visually. Returns the image data directly for vision analysis.",
    {
      display: z4.number().optional().default(1).describe("Display number (1-indexed)"),
      question: z4.string().optional().describe("What to look for in the screenshot")
    },
    async ({ display, question }) => {
      // randomUUID keeps concurrent invocations from clobbering each other's file.
      const tempPath = join7(tmpdir3(), `markupr-mcp-screenshot-${randomUUID3()}.png`);
      try {
        log(`Capturing screenshot for analysis: display=${display}`);
        await capture({ display, outputPath: tempPath });
        await optimize(tempPath);
        const imageBuffer = await readFile3(tempPath);
        const base64Data = imageBuffer.toString("base64");
        const timestamp = (/* @__PURE__ */ new Date()).toISOString();
        const description = question ? `Screenshot of display ${display} captured at ${timestamp}. Question: ${question}` : `Screenshot of display ${display} captured at ${timestamp}`;
        return {
          content: [
            {
              type: "image",
              data: base64Data,
              mimeType: "image/png"
            },
            {
              type: "text",
              text: description
            }
          ]
        };
      } catch (error) {
        return {
          content: [{ type: "text", text: `Error: ${error.message}` }],
          isError: true
        };
      } finally {
        // Best-effort cleanup; ignore errors (e.g. capture never wrote the file).
        await unlink3(tempPath).catch(() => {
        });
      }
    }
  );
}
|
|
2923
|
+
|
|
2924
|
+
// src/mcp/tools/startRecording.ts
|
|
2925
|
+
import { z as z5 } from "zod";
|
|
2926
|
+
import { join as join8 } from "path";
|
|
2927
|
+
|
|
2928
|
+
// src/mcp/session/ActiveRecording.ts
|
|
2929
|
+
/**
 * Tracks the single in-flight recording for this MCP server process.
 * At most one recording may be active at any time.
 */
var ActiveRecording = class {
  // { sessionId, process, videoPath } while recording, otherwise null.
  current = null;
  /**
   * Start tracking a recording. Throws if one is already in progress.
   */
  start(sessionId, process2, videoPath) {
    const existing = this.current;
    if (existing) {
      throw new Error(
        `Recording already in progress (session: ${existing.sessionId}). Stop it before starting a new one.`
      );
    }
    this.current = { sessionId, process: process2, videoPath };
    log(`Active recording started: ${sessionId}`);
  }
  /**
   * Stop tracking the current recording. Returns the session ID and video path.
   * Throws if no recording is active.
   */
  stop() {
    const active = this.current;
    if (active === null) {
      throw new Error("No recording in progress.");
    }
    // Clear the slot before returning so a new recording may start.
    this.current = null;
    log(`Active recording stopped: ${active.sessionId}`);
    return { sessionId: active.sessionId, videoPath: active.videoPath };
  }
  /**
   * Check if a recording is currently active.
   */
  isRecording() {
    return this.current !== null;
  }
  /**
   * Get the current recording state (read-only), or null.
   */
  getCurrent() {
    return this.current;
  }
};
var activeRecording = new ActiveRecording();
|
|
2970
|
+
|
|
2971
|
+
// src/mcp/tools/startRecording.ts
|
|
2972
|
+
/**
 * Register the `start_recording` MCP tool.
 *
 * Creates a new session, launches an open-ended screen+voice recording
 * into it, and registers the recorder with the module-wide
 * `activeRecording` tracker. The pre-check plus the tracker enforce a
 * single active recording; `stop_recording` finishes and processes it.
 */
function register5(server) {
  server.tool(
    "start_recording",
    "Start a long-form screen+voice recording session. Returns a session ID that can be used with stop_recording.",
    {
      label: z5.string().optional().describe("Session label for organization")
    },
    async ({ label }) => {
      try {
        // Check up front so we can return a friendly tool error instead of
        // letting activeRecording.start() throw.
        if (activeRecording.isRecording()) {
          const current = activeRecording.getCurrent();
          return {
            content: [
              {
                type: "text",
                text: `Error: Recording already in progress (session: ${current?.sessionId}). Stop it before starting a new one.`
              }
            ],
            isError: true
          };
        }
        const session = await sessionStore.create(label);
        const sessionDir = sessionStore.getSessionDir(session.id);
        const videoPath = join8(sessionDir, "recording.mp4");
        log(`Starting long-form recording: session=${session.id}`);
        // Not awaited: start() returns the live recorder process handle,
        // which keeps running until stop_recording.
        const process2 = start({ outputPath: videoPath });
        activeRecording.start(session.id, process2, videoPath);
        return {
          content: [
            {
              type: "text",
              text: [
                "Recording started.",
                `Session ID: ${session.id}`,
                "Status: recording",
                "Use stop_recording to end and process the recording."
              ].join("\n")
            }
          ]
        };
      } catch (error) {
        return {
          content: [{ type: "text", text: `Error: ${error.message}` }],
          isError: true
        };
      }
    }
  );
}
|
|
3021
|
+
|
|
3022
|
+
// src/mcp/tools/stopRecording.ts
|
|
3023
|
+
import { z as z6 } from "zod";
|
|
3024
|
+
/**
 * Register the `stop_recording` MCP tool.
 *
 * Stops the active recording, runs the CLIPipeline over the captured
 * video into the session directory, and marks the session complete.
 * NOTE(review): the `sessionId` parameter is accepted but currently
 * unused — at most one recording can be active, and that one is
 * always the recording stopped.
 */
function register6(server) {
  server.tool(
    "stop_recording",
    "Stop an active recording and run the full markupr pipeline on the captured video.",
    {
      sessionId: z6.string().optional().describe("Session ID (default: current active recording)"),
      skipFrames: z6.boolean().optional().default(false).describe("Skip frame extraction"),
      template: z6.string().optional().describe(
        `Output template (default: markdown). Options: ${templateRegistry.list().join(", ")}`
      )
    },
    async ({ sessionId: _requestedSessionId, skipFrames, template }) => {
      try {
        if (!activeRecording.isRecording()) {
          return {
            content: [{ type: "text", text: "Error: No recording in progress." }],
            isError: true
          };
        }
        // Defensive re-read; only null if the recording ended between calls.
        const current = activeRecording.getCurrent();
        if (!current) {
          return {
            content: [{ type: "text", text: "Error: No recording in progress." }],
            isError: true
          };
        }
        log(`Stopping recording: session=${current.sessionId}`);
        // Stop the recorder process first so the video file is flushed
        // before the tracker slot is cleared and the pipeline starts.
        await stop(current.process);
        const { sessionId, videoPath } = activeRecording.stop();
        await sessionStore.update(sessionId, { status: "processing" });
        const sessionDir = sessionStore.getSessionDir(sessionId);
        const pipeline = new CLIPipeline(
          {
            videoPath,
            outputDir: sessionDir,
            skipFrames,
            template,
            verbose: false
          },
          (msg) => log(msg)
        );
        const result = await pipeline.run();
        await sessionStore.update(sessionId, {
          status: "complete",
          endTime: Date.now(),
          videoPath,
          reportPath: result.outputPath
        });
        return {
          content: [
            {
              type: "text",
              text: [
                `Recording stopped and processed.`,
                `Session: ${sessionId}`,
                "Pipeline results:",
                ` Transcript segments: ${result.transcriptSegments}`,
                ` Extracted frames: ${result.extractedFrames}`,
                ` Processing time: ${result.durationSeconds.toFixed(1)}s`,
                "",
                `Report: ${result.outputPath}`,
                `OUTPUT:${result.outputPath}`
              ].join("\n")
            }
          ]
        };
      } catch (error) {
        // NOTE(review): a pipeline failure leaves the session stuck in
        // "processing" — consider updating it to a failed status here.
        return {
          content: [{ type: "text", text: `Error: ${error.message}` }],
          isError: true
        };
      }
    }
  );
}
|
|
3099
|
+
|
|
3100
|
+
// src/mcp/tools/pushToLinear.ts
|
|
3101
|
+
import { z as z7 } from "zod";
|
|
3102
|
+
import { stat as stat5 } from "fs/promises";
|
|
3103
|
+
|
|
3104
|
+
// src/integrations/linear/LinearIssueCreator.ts
|
|
3105
|
+
import { readFile as readFile4 } from "fs/promises";
|
|
3106
|
+
|
|
3107
|
+
// src/integrations/linear/types.ts
|
|
3108
|
+
// markupr severity → Linear numeric priority (lower number = more
// urgent on Linear's scale — verify against Linear's priority docs).
var SEVERITY_TO_PRIORITY = {
  Critical: 1,
  High: 2,
  Medium: 3,
  Low: 4
};
// markupr feedback category → Linear label name. Categories without a
// dedicated label map onto the closest generic one ("Bug"/"Feature").
var CATEGORY_TO_LABEL = {
  "Bug": "Bug",
  "UX Issue": "Improvement",
  "Suggestion": "Feature",
  "Performance": "Bug",
  "Question": "Feature",
  "General": "Feature"
};
|
|
3122
|
+
|
|
3123
|
+
// src/integrations/linear/LinearIssueCreator.ts
|
|
3124
|
+
// Linear's public GraphQL endpoint; every request below goes through graphql().
var LINEAR_API_URL = "https://api.linear.app/graphql";
/**
 * Creates Linear issues from a markupr markdown report via the Linear
 * GraphQL API — one issue per parsed feedback item.
 */
var LinearIssueCreator = class {
  // Linear API key; sent verbatim in the Authorization header
  // (no "Bearer " prefix — Linear personal API keys are sent raw).
  token;
  constructor(token) {
    this.token = token;
  }
  /**
   * Push a markupr report to Linear, creating one issue per feedback item.
   */
  async pushReport(reportPath, options) {
    const markdown = await readFile4(reportPath, "utf-8");
    const items = parseMarkdownReport(markdown);
    const team = await this.resolveTeam(options.teamKey);
    const labels = await this.getTeamLabels(team.id);
    const result = {
      teamKey: options.teamKey,
      totalItems: items.length,
      created: 0,
      failed: 0,
      issues: [],
      dryRun: options.dryRun ?? false
    };
    for (const item of items) {
      // Categories without a mapped label fall back to "Feature".
      const labelName = CATEGORY_TO_LABEL[item.category] ?? "Feature";
      const matchingLabel = labels.find(
        (l) => l.name.toLowerCase() === labelName.toLowerCase()
      );
      const issueInput = {
        title: `[${item.id}] ${item.title}`,
        description: this.buildIssueDescription(item),
        teamId: team.id,
        priority: SEVERITY_TO_PRIORITY[item.severity] ?? 3,
        labelIds: matchingLabel ? [matchingLabel.id] : void 0,
        // NOTE(review): resolved per item even though the project name is
        // constant for the whole report — could be hoisted out of the loop.
        projectId: options.projectName ? await this.resolveProjectId(team.id, options.projectName) : void 0
      };
      if (options.dryRun) {
        // Dry run: fabricate a successful result without calling the API.
        result.issues.push({
          success: true,
          issueId: `dry-run-${item.id}`,
          identifier: `DRY-${item.id}`,
          issueUrl: `https://linear.app/dry-run/${item.id}`
        });
        result.created++;
        continue;
      }
      const issueResult = await this.createIssue(issueInput);
      result.issues.push(issueResult);
      if (issueResult.success) {
        result.created++;
      } else {
        result.failed++;
      }
    }
    return result;
  }
  /**
   * Create a single Linear issue via GraphQL.
   * Never throws: API/network failures come back as { success: false, error }.
   */
  async createIssue(input) {
    const mutation = `
    mutation IssueCreate($input: IssueCreateInput!) {
      issueCreate(input: $input) {
        success
        issue {
          id
          url
          identifier
        }
      }
    }
  `;
    const variables = {
      input: {
        title: input.title,
        description: input.description,
        teamId: input.teamId,
        priority: input.priority,
        // Omit optional keys entirely rather than sending null.
        ...input.labelIds && { labelIds: input.labelIds },
        ...input.projectId && { projectId: input.projectId }
      }
    };
    try {
      const data = await this.graphql(mutation, variables);
      if (data.issueCreate.success) {
        return {
          success: true,
          issueId: data.issueCreate.issue.id,
          issueUrl: data.issueCreate.issue.url,
          identifier: data.issueCreate.issue.identifier
        };
      }
      return { success: false, error: "Linear API returned success: false" };
    } catch (error) {
      return {
        success: false,
        error: error instanceof Error ? error.message : String(error)
      };
    }
  }
  /**
   * Resolve a team key (e.g., "ENG") to a team ID.
   * Case-insensitive; throws with the list of available keys when not found.
   */
  async resolveTeam(teamKey) {
    const query = `
    query Teams {
      teams {
        nodes {
          id
          key
          name
        }
      }
    }
  `;
    const data = await this.graphql(query);
    const team = data.teams.nodes.find(
      (t) => t.key.toLowerCase() === teamKey.toLowerCase()
    );
    if (!team) {
      const available = data.teams.nodes.map((t) => t.key).join(", ");
      throw new Error(
        `Team "${teamKey}" not found. Available teams: ${available}`
      );
    }
    return team;
  }
  /**
   * Get all labels for a team.
   */
  async getTeamLabels(teamId) {
    const query = `
    query TeamLabels($teamId: String!) {
      team(id: $teamId) {
        labels {
          nodes {
            id
            name
          }
        }
      }
    }
  `;
    const data = await this.graphql(query, { teamId });
    return data.team.labels.nodes;
  }
  /**
   * Resolve a project name to a project ID within a team.
   * Returns undefined when no case-insensitive name match exists.
   */
  async resolveProjectId(teamId, projectName) {
    const query = `
    query Projects($teamId: String!) {
      team(id: $teamId) {
        projects {
          nodes {
            id
            name
          }
        }
      }
    }
  `;
    const data = await this.graphql(query, { teamId });
    const project = data.team.projects.nodes.find(
      (p) => p.name.toLowerCase() === projectName.toLowerCase()
    );
    return project?.id;
  }
  /**
   * Build markdown description for a Linear issue from a feedback item.
   */
  buildIssueDescription(item) {
    let desc = `## markupr Feedback: ${item.id}

`;
    desc += `**Severity:** ${item.severity}
`;
    desc += `**Category:** ${item.category}
`;
    desc += `**Timestamp:** ${item.timestamp}

`;
    desc += `### Description

${item.description}

`;
    if (item.suggestedAction) {
      desc += `### Suggested Action

${item.suggestedAction}

`;
    }
    if (item.screenshotPaths.length > 0) {
      desc += `### Screenshots

`;
      // Paths are listed as code spans; the files themselves are not uploaded.
      desc += `_${item.screenshotPaths.length} screenshot(s) captured during session._
`;
      for (const path4 of item.screenshotPaths) {
        desc += `- \`${path4}\`
`;
      }
    }
    desc += `
---
*Created by [markupr](https://markupr.com)*`;
    return desc;
  }
  /**
   * Execute a GraphQL request against the Linear API.
   * Throws on HTTP errors, GraphQL errors, or an empty data payload.
   */
  async graphql(query, variables) {
    const response = await fetch(LINEAR_API_URL, {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: this.token
      },
      body: JSON.stringify({ query, variables })
    });
    if (!response.ok) {
      throw new Error(
        `Linear API error: ${response.status} ${response.statusText}`
      );
    }
    const json = await response.json();
    if (json.errors && json.errors.length > 0) {
      throw new Error(`Linear GraphQL error: ${json.errors[0].message}`);
    }
    if (!json.data) {
      throw new Error("Linear API returned no data");
    }
    return json.data;
  }
};
|
|
3360
|
+
/**
 * Parse a markupr markdown report into structured feedback items.
 * Items are delimited by "### FB-<n>: <title>" headings; each item's
 * section runs until the next heading (or end of document).
 */
function parseMarkdownReport(markdown) {
  const itemPattern = /^### (FB-\d+): (.+)$/gm;
  const headings = [];
  for (let m = itemPattern.exec(markdown); m !== null; m = itemPattern.exec(markdown)) {
    headings.push({ index: m.index, id: m[1], title: m[2] });
  }
  return headings.map((heading, i) => {
    const sectionEnd = i + 1 < headings.length ? headings[i + 1].index : markdown.length;
    const section = markdown.slice(heading.index, sectionEnd);
    return {
      id: heading.id,
      title: heading.title,
      severity: extractField(section, "Severity") || "Medium",
      category: extractField(section, "Type") || "General",
      timestamp: extractField(section, "Timestamp") || "00:00",
      description: extractBlockquote(section),
      screenshotPaths: extractScreenshots(section),
      suggestedAction: extractSuggestedAction(section)
    };
  });
}
|
|
3391
|
+
/**
 * Extract a bold "**Field:** value" line from a markdown section.
 *
 * @param {string} section - Markdown text to search.
 * @param {string} fieldName - Field label, treated literally (not as a regex).
 * @returns {string} The trimmed value, or "" when the field is absent.
 */
function extractField(section, fieldName) {
  // Escape regex metacharacters so labels like "C++ (v2)" can't break
  // (or be injected into) the dynamically constructed pattern.
  const escaped = fieldName.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const pattern = new RegExp(`\\*\\*${escaped}:\\*\\*\\s*(.+)`, "m");
  const match = section.match(pattern);
  return match ? match[1].trim() : "";
}
|
|
3396
|
+
/**
 * Pull the blockquoted narrative under "#### What Happened" out of a
 * feedback section, joining the quote lines into a single string.
 * Returns "" when the section has no such heading.
 */
function extractBlockquote(section) {
  const bodyMatch = section.match(/#### What Happened\s*\n([\s\S]*?)(?=\n####|\n---)/);
  if (bodyMatch === null) {
    return "";
  }
  const quoted = [];
  for (const rawLine of bodyMatch[1].split("\n")) {
    if (rawLine.startsWith(">")) {
      quoted.push(rawLine.replace(/^>\s*/, "").trim());
    }
  }
  return quoted.join(" ").trim();
}
|
|
3402
|
+
/**
 * Collect the target paths of all markdown image links ("![alt](path)")
 * in a section, in document order.
 */
function extractScreenshots(section) {
  const imageLink = /!\[.*?\]\((.+?)\)/g;
  return [...section.matchAll(imageLink)].map((m) => m[1]);
}
|
|
3411
|
+
/**
 * Read the first bullet under "#### Suggested Next Step", or "" if absent.
 */
function extractSuggestedAction(section) {
  const bullet = /#### Suggested Next Step\s*\n-\s*(.+)/.exec(section);
  return bullet === null ? "" : bullet[1].trim();
}
|
|
3415
|
+
|
|
3416
|
+
// src/mcp/tools/pushToLinear.ts
|
|
3417
|
+
/**
 * Register the `push_to_linear` MCP tool.
 *
 * Resolves a Linear API token (parameter, else LINEAR_API_KEY env var),
 * validates the report file, then delegates to
 * LinearIssueCreator.pushReport and formats a per-issue summary.
 * `dryRun` previews the push without creating any issues.
 */
function register7(server) {
  server.tool(
    "push_to_linear",
    "Push a markupr feedback report to Linear. Creates one issue per feedback item with priority mapping, labels, and full context.",
    {
      reportPath: z7.string().describe("Absolute path to the markupr markdown report"),
      teamKey: z7.string().describe('Linear team key (e.g., "ENG", "DES")'),
      token: z7.string().optional().describe("Linear API key (or set LINEAR_API_KEY env var)"),
      projectName: z7.string().optional().describe("Linear project name to assign issues to"),
      dryRun: z7.boolean().optional().default(false).describe("Preview what would be created without actually creating issues")
    },
    async ({ reportPath, teamKey, token, projectName, dryRun }) => {
      try {
        const apiToken = token || process.env.LINEAR_API_KEY;
        if (!apiToken) {
          return {
            content: [
              {
                type: "text",
                text: "Error: No Linear API token provided. Pass via `token` parameter or set LINEAR_API_KEY env var."
              }
            ],
            isError: true
          };
        }
        // Validate the report file before touching the network.
        try {
          const stats = await stat5(reportPath);
          if (!stats.isFile() || stats.size === 0) {
            return {
              content: [
                {
                  type: "text",
                  text: `Error: Report file is empty or not a regular file: ${reportPath}`
                }
              ],
              isError: true
            };
          }
        } catch {
          // stat() threw: treat as "file not found".
          return {
            content: [
              {
                type: "text",
                text: `Error: Report file not found: ${reportPath}`
              }
            ],
            isError: true
          };
        }
        log(`Pushing report to Linear: ${reportPath} \u2192 team ${teamKey}`);
        const creator = new LinearIssueCreator(apiToken);
        const result = await creator.pushReport(reportPath, {
          token: apiToken,
          teamKey,
          projectName,
          dryRun
        });
        const lines = [
          dryRun ? "DRY RUN \u2014 no issues created" : "Push to Linear complete",
          "",
          `Team: ${teamKey}`,
          `Total items: ${result.totalItems}`,
          `Created: ${result.created}`,
          `Failed: ${result.failed}`,
          ""
        ];
        // One summary line per issue: URL on success, error on failure.
        for (const issue of result.issues) {
          if (issue.success) {
            lines.push(
              ` ${issue.identifier}: ${issue.issueUrl}`
            );
          } else {
            lines.push(` FAILED: ${issue.error}`);
          }
        }
        return {
          content: [{ type: "text", text: lines.join("\n") }]
        };
      } catch (error) {
        return {
          content: [
            {
              type: "text",
              text: `Error: ${error.message}`
            }
          ],
          isError: true
        };
      }
    }
  );
}
|
|
3509
|
+
|
|
3510
|
+
// src/mcp/tools/pushToGitHub.ts
|
|
3511
|
+
import { z as z8 } from "zod";
|
|
3512
|
+
import { stat as stat6 } from "fs/promises";
|
|
3513
|
+
|
|
3514
|
+
// src/integrations/github/GitHubIssueCreator.ts
|
|
3515
|
+
import { readFile as readFile5 } from "fs/promises";
|
|
3516
|
+
|
|
3517
|
+
// src/integrations/github/types.ts
|
|
3518
|
+
// GitHub label definitions (name, hex color, description) keyed by
// markupr feedback category.
var CATEGORY_LABELS = {
  Bug: { name: "bug", color: "d73a4a", description: "Something isn't working" },
  "UX Issue": { name: "ux", color: "e4e669", description: "User experience issue" },
  Suggestion: { name: "enhancement", color: "a2eeef", description: "New feature or request" },
  Performance: { name: "performance", color: "f9d0c4", description: "Performance issue" },
  Question: { name: "question", color: "d876e3", description: "Further information is requested" },
  General: { name: "feedback", color: "c5def5", description: "General feedback" }
};
// GitHub priority labels keyed by markupr severity.
var SEVERITY_LABELS = {
  Critical: { name: "priority: critical", color: "b60205", description: "Critical priority" },
  High: { name: "priority: high", color: "d93f0b", description: "High priority" },
  Medium: { name: "priority: medium", color: "fbca04", description: "Medium priority" },
  Low: { name: "priority: low", color: "0e8a16", description: "Low priority" }
};
// Label identifying issues that originate from markupr sessions.
var MARKUPR_LABEL = {
  name: "markupr",
  color: "6f42c1",
  description: "Created from markupr feedback session"
};
|
|
3537
|
+
|
|
3538
|
+
// src/integrations/github/GitHubIssueCreator.ts
|
|
3539
|
+
// Base URL for the GitHub REST API.
var GITHUB_API = "https://api.github.com";
|
|
3540
|
+
// Resolve a GitHub token, in priority order:
//   1. an explicit token (CLI flag),
//   2. the GITHUB_TOKEN or GH_TOKEN environment variables,
//   3. the logged-in GitHub CLI (`gh auth token`).
// Returns { token, source } where source is "flag" | "env" | "gh-cli".
// Throws when no credential can be found.
async function resolveAuth(explicitToken) {
  if (explicitToken) {
    return { token: explicitToken, source: "flag" };
  }
  // GH_TOKEN is the alias honored by the GitHub CLI and Actions runners.
  const envToken = process.env.GITHUB_TOKEN || process.env.GH_TOKEN;
  if (envToken) {
    return { token: envToken, source: "env" };
  }
  try {
    const { execSync } = await import("child_process");
    const ghToken = execSync("gh auth token", { encoding: "utf-8", timeout: 5e3 }).trim();
    if (ghToken) {
      return { token: ghToken, source: "gh-cli" };
    }
  } catch {
    // gh CLI not installed or not authenticated — fall through to the error below.
  }
  throw new Error(
    "No GitHub token found. Provide one via:\n --token <token>\n GITHUB_TOKEN or GH_TOKEN environment variable\n gh auth login (GitHub CLI)"
  );
}
|
|
3560
|
+
// Parse a markupr markdown report into structured feedback items.
// Each item is introduced by a "### FB-NNN: Title" heading; the text that
// follows (up to the next item heading or top-level "## X" section)
// supplies its fields via the extract* helpers.
function parseMarkuprReport(markdown) {
  const itemPattern = /### (FB-\d{3}): (.+?)(?=\n)/g;
  const items = [];
  for (let match = itemPattern.exec(markdown); match !== null; match = itemPattern.exec(markdown)) {
    const [heading, id, rawTitle] = match;
    const rest = markdown.slice(match.index + heading.length);
    const boundary = rest.match(/\n### FB-\d{3}:|(?=\n## [A-Z])/);
    const itemBlock = boundary ? rest.slice(0, boundary.index) : rest;
    items.push({
      id,
      title: rawTitle.trim(),
      category: extractField2(itemBlock, "Type") || "General",
      severity: extractField2(itemBlock, "Severity") || "Medium",
      timestamp: extractField2(itemBlock, "Timestamp") || "00:00",
      transcription: extractTranscription(itemBlock),
      screenshotPaths: extractScreenshots2(itemBlock),
      suggestedAction: extractSuggestedAction2(itemBlock)
    });
  }
  return items;
}
|
|
3590
|
+
// Extract a bolded "**Field:** value" line from a markdown block.
// Returns the trimmed value, or undefined when the field is absent.
function extractField2(block, fieldName) {
  // Escape regex metacharacters so a field name containing ".", "+", etc.
  // cannot break (or silently distort) the dynamically built pattern.
  const escaped = fieldName.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const pattern = new RegExp(`\\*\\*${escaped}:\\*\\*\\s*(.+)`);
  const match = block.match(pattern);
  return match ? match[1].trim() : void 0;
}
|
|
3595
|
+
// Pull the blockquoted transcription out of the "#### What Happened"
// section, joining the quoted lines into one space-separated string.
// Returns "" when the section is missing.
function extractTranscription(block) {
  const headingIdx = block.indexOf("#### What Happened");
  if (headingIdx === -1) return "";
  const tail = block.slice(headingIdx);
  const boundary = tail.indexOf("\n####", 5);
  const section = boundary === -1 ? tail : tail.slice(0, boundary);
  const quoted = [];
  section.split("\n").forEach((raw) => {
    const line = raw.trim();
    if (line.startsWith("> ")) {
      quoted.push(line.slice(2));
    } else if (line === ">") {
      // Bare ">" marks an empty quoted line.
      quoted.push("");
    }
  });
  return quoted.join(" ").trim();
}
|
|
3613
|
+
// Collect every markdown image target (the path inside "![alt](path)")
// appearing in the block, in document order.
function extractScreenshots2(block) {
  return Array.from(block.matchAll(/!\[.*?\]\((.+?)\)/g), (m) => m[1]);
}
|
|
3622
|
+
// Return the first bullet under "#### Suggested Next Step" (without the
// "- " prefix), stopping at the "---" divider. "" when absent.
function extractSuggestedAction2(block) {
  const heading = "#### Suggested Next Step";
  const idx = block.indexOf(heading);
  if (idx === -1) return "";
  const tail = block.slice(idx + heading.length);
  const cutoff = tail.indexOf("\n---");
  const section = cutoff === -1 ? tail : tail.slice(0, cutoff);
  const bullet = section
    .split("\n")
    .map((line) => line.trim())
    .find((line) => line.startsWith("- "));
  return bullet ? bullet.slice(2) : "";
}
|
|
3637
|
+
// Render one feedback item as a GitHub issue body (markdown): summary
// table, quoted transcription, optional screenshots note and suggested
// action, then a footer crediting markupr (and the source report, if given).
function formatIssueBody(item, reportPath) {
  const parts = [
    `## ${item.id}: ${item.title}\n\n`,
    "| Field | Value |\n|-------|-------|\n",
    `| **Severity** | ${item.severity} |\n`,
    `| **Category** | ${item.category} |\n`,
    `| **Timestamp** | ${item.timestamp} |\n\n`,
    "### What Happened\n\n",
    `> ${item.transcription}\n\n`
  ];
  if (item.screenshotPaths.length > 0) {
    // Images themselves stay in the report; the issue only notes the count.
    parts.push("### Screenshots\n\n");
    parts.push(`_${item.screenshotPaths.length} screenshot(s) captured \u2014 see the markupr report for images._\n\n`);
  }
  if (item.suggestedAction) {
    parts.push("### Suggested Action\n\n");
    parts.push(`${item.suggestedAction}\n\n`);
  }
  parts.push("---\n");
  if (reportPath) {
    parts.push(`_Source: \`${reportPath}\`_\n`);
  }
  parts.push("_Created by [markupr](https://markupr.com)_\n");
  return parts.join("");
}
|
|
3683
|
+
// Label names to apply to the issue created for one feedback item:
// always the markupr marker, then the mapped category and severity
// labels (skipping any that have no mapping).
function getLabelsForItem(item) {
  const names = [MARKUPR_LABEL.name];
  for (const mapped of [CATEGORY_LABELS[item.category], SEVERITY_LABELS[item.severity]]) {
    if (mapped) {
      names.push(mapped.name);
    }
  }
  return names;
}
|
|
3695
|
+
// Deduplicated list of every label definition the batch of items needs,
// starting with the markupr marker label, in first-seen order.
function collectRequiredLabels(items) {
  const byName = new Map([[MARKUPR_LABEL.name, MARKUPR_LABEL]]);
  for (const item of items) {
    for (const candidate of [CATEGORY_LABELS[item.category], SEVERITY_LABELS[item.severity]]) {
      if (candidate && !byName.has(candidate.name)) {
        byName.set(candidate.name, candidate);
      }
    }
  }
  return [...byName.values()];
}
|
|
3714
|
+
// Minimal GitHub REST client scoped to what the push flow needs:
// creating issues, ensuring labels exist, and verifying repo access.
// Auth is a bearer token; baseUrl defaults to the public API.
var GitHubAPIClient = class {
  baseUrl;
  headers;
  constructor(auth, baseUrl = GITHUB_API) {
    this.baseUrl = baseUrl;
    this.headers = {
      Authorization: `Bearer ${auth.token}`,
      Accept: "application/vnd.github+json",
      "X-GitHub-Api-Version": "2022-11-28",
      "Content-Type": "application/json",
      "User-Agent": "markupr-github-integration"
    };
  }
  // POST /repos/{owner}/{repo}/issues — returns { number, url, title }
  // for the created issue. Throws on any non-2xx response.
  async createIssue(repo, input) {
    const response = await fetch(`${this.baseUrl}/repos/${repo.owner}/${repo.repo}/issues`, {
      method: "POST",
      headers: this.headers,
      body: JSON.stringify({ title: input.title, body: input.body, labels: input.labels })
    });
    if (!response.ok) {
      const text = await response.text();
      throw new Error(`GitHub API error (${response.status}): ${text}`);
    }
    const issue = await response.json();
    return { number: issue.number, url: issue.html_url, title: issue.title };
  }
  // Create the label if it does not already exist. Returns true when a
  // label was created, false when it was already present (including the
  // 422 "already exists" response from a concurrent creation).
  async ensureLabel(repo, label) {
    const labelsUrl = `${this.baseUrl}/repos/${repo.owner}/${repo.repo}/labels`;
    const existing = await fetch(`${labelsUrl}/${encodeURIComponent(label.name)}`, {
      method: "GET",
      headers: this.headers
    });
    if (existing.ok) {
      return false;
    }
    const created = await fetch(labelsUrl, {
      method: "POST",
      headers: this.headers,
      body: JSON.stringify({ name: label.name, color: label.color, description: label.description })
    });
    if (created.ok) {
      return true;
    }
    if (created.status === 422) {
      return false;
    }
    const text = await created.text();
    throw new Error(`Failed to create label "${label.name}": ${text}`);
  }
  // Throw a descriptive error unless the token can read the repository.
  async verifyAccess(repo) {
    const response = await fetch(`${this.baseUrl}/repos/${repo.owner}/${repo.repo}`, {
      method: "GET",
      headers: this.headers
    });
    if (response.ok) {
      return;
    }
    if (response.status === 404) {
      throw new Error(`Repository ${repo.owner}/${repo.repo} not found (or no access)`);
    }
    if (response.status === 401) {
      throw new Error("GitHub token is invalid or expired");
    }
    throw new Error(`Failed to access repository (${response.status})`);
  }
};
|
|
3794
|
+
/**
 * Push feedback items from a markupr report to GitHub as issues.
 *
 * Reads and parses the report, optionally filters to specific FB-XXX ids,
 * then (unless dryRun) verifies repo access, ensures all required labels
 * exist, and creates one issue per item. Individual label/issue failures
 * are collected in `result.errors` instead of aborting the batch.
 *
 * @param options { repo, auth, reportPath, dryRun?, items? }
 * @returns { created, labelsCreated, errors, dryRun }
 * @throws when the report has no items or the filter matches none,
 *         or when repo access verification fails.
 */
async function pushToGitHub(options) {
  const { repo, auth, reportPath, dryRun = false, items: filterIds } = options;
  const markdown = await readFile5(reportPath, "utf-8");
  let items = parseMarkuprReport(markdown);
  if (items.length === 0) {
    throw new Error("No feedback items found in the report. Is this a valid markupr report?");
  }
  // Optionally narrow to an explicit set of FB-XXX ids (case-insensitive).
  if (filterIds && filterIds.length > 0) {
    const filterSet = new Set(filterIds.map((id) => id.toUpperCase()));
    items = items.filter((item) => filterSet.has(item.id));
    if (items.length === 0) {
      throw new Error(`None of the specified items (${filterIds.join(", ")}) found in the report`);
    }
  }
  const result = {
    created: [],
    labelsCreated: [],
    errors: [],
    dryRun
  };
  if (dryRun) {
    // Preview only: report the titles and labels that WOULD be created.
    // (Fixed: the previous version computed per-item labels here and
    // discarded them.)
    for (const item of items) {
      result.created.push({
        number: 0,
        url: "",
        title: `[${item.id}] ${item.title}`
      });
    }
    result.labelsCreated = collectRequiredLabels(items).map((l) => l.name);
    return result;
  }
  const client = new GitHubAPIClient(auth);
  await client.verifyAccess(repo);
  // Ensure labels before creating issues that reference them; label
  // failures are recorded under the synthetic id "labels".
  const requiredLabels = collectRequiredLabels(items);
  for (const label of requiredLabels) {
    try {
      const created = await client.ensureLabel(repo, label);
      if (created) {
        result.labelsCreated.push(label.name);
      }
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      result.errors.push({ itemId: "labels", error: message });
    }
  }
  // One issue per item; a failure on one item does not stop the rest.
  for (const item of items) {
    try {
      const issueResult = await client.createIssue(repo, {
        title: `[${item.id}] ${item.title}`,
        body: formatIssueBody(item, reportPath),
        labels: getLabelsForItem(item)
      });
      result.created.push(issueResult);
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      result.errors.push({ itemId: item.id, error: message });
    }
  }
  return result;
}
|
|
3857
|
+
// Split an "owner/repo" string into its two parts; any other shape
// (missing slash, empty segment, extra segments) is rejected.
function parseRepoString(repoStr) {
  const [owner, repo, ...extra] = repoStr.split("/");
  if (!owner || !repo || extra.length > 0) {
    throw new Error(`Invalid repository format: "${repoStr}". Expected "owner/repo".`);
  }
  return { owner, repo };
}
|
|
3864
|
+
|
|
3865
|
+
// src/mcp/tools/pushToGitHub.ts
|
|
3866
|
+
// Register the "push_to_github" MCP tool: validates the report path,
// repo string, and auth, then delegates to pushToGitHub() and renders a
// human-readable summary of the (possibly dry-run) results.
function register8(server) {
  // Uniform MCP error payload used by every failure path below
  // (previously this object literal was duplicated five times).
  const errorResult = (text) => ({
    content: [{ type: "text", text }],
    isError: true
  });
  server.tool(
    "push_to_github",
    "Create GitHub issues from a markupr feedback report. Each feedback item becomes a separate issue with labels and structured markdown.",
    {
      reportPath: z8.string().describe("Absolute path to the markupr markdown report"),
      repo: z8.string().describe('Target GitHub repository in "owner/repo" format'),
      token: z8.string().optional().describe("GitHub token (falls back to GITHUB_TOKEN env or gh CLI)"),
      items: z8.array(z8.string()).optional().describe("Specific FB-XXX item IDs to push (default: all)"),
      dryRun: z8.boolean().optional().default(false).describe("Preview what would be created without creating")
    },
    async ({ reportPath, repo, token, items, dryRun }) => {
      try {
        // The report must exist and be a regular file before anything else.
        try {
          const stats = await stat6(reportPath);
          if (!stats.isFile()) {
            return errorResult(`Error: Not a file: ${reportPath}`);
          }
        } catch {
          return errorResult(`Error: Report not found: ${reportPath}`);
        }
        let parsedRepo;
        try {
          parsedRepo = parseRepoString(repo);
        } catch (err) {
          return errorResult(`Error: ${err.message}`);
        }
        let auth;
        try {
          auth = await resolveAuth(token);
        } catch (err) {
          return errorResult(`Error: ${err.message}`);
        }
        log(`Pushing to GitHub: ${repo} (auth: ${auth.source}, dryRun: ${dryRun})`);
        const result = await pushToGitHub({
          repo: parsedRepo,
          auth,
          reportPath,
          dryRun,
          items
        });
        // Render a summary: either the preview (dry run) or the created
        // issues with their URLs, plus any per-item errors.
        const lines = [];
        if (dryRun) {
          lines.push(`Dry run \u2014 ${result.created.length} issue(s) would be created:`);
          lines.push("");
          for (const issue of result.created) {
            lines.push(` - ${issue.title}`);
          }
          if (result.labelsCreated.length > 0) {
            lines.push("");
            lines.push(`Labels to create: ${result.labelsCreated.join(", ")}`);
          }
        } else {
          lines.push(`Created ${result.created.length} issue(s):`);
          lines.push("");
          for (const issue of result.created) {
            lines.push(` - #${issue.number}: ${issue.title}`);
            lines.push(` ${issue.url}`);
          }
          if (result.labelsCreated.length > 0) {
            lines.push("");
            lines.push(`Labels created: ${result.labelsCreated.join(", ")}`);
          }
        }
        if (result.errors.length > 0) {
          lines.push("");
          lines.push(`Errors (${result.errors.length}):`);
          for (const err of result.errors) {
            lines.push(` - ${err.itemId}: ${err.error}`);
          }
        }
        return {
          content: [{ type: "text", text: lines.join("\n") }]
        };
      } catch (error) {
        return errorResult(`Error: ${error.message}`);
      }
    }
  );
}
|
|
3961
|
+
|
|
3962
|
+
// src/mcp/resources/sessionResource.ts
|
|
3963
|
+
import { ResourceTemplate } from "@modelcontextprotocol/sdk/server/mcp.js";
|
|
3964
|
+
// Expose recording sessions as MCP resources:
//   session://latest — metadata for the most recent session
//   session://{id}   — metadata for a specific session by id
// Both serve JSON; a missing session yields an { error } JSON payload
// rather than a thrown error.
function registerResources(server) {
  // Single-entry JSON contents payload shared by every branch below
  // (previously this object shape was duplicated four times).
  const jsonContents = (uri, text) => ({
    contents: [{ uri, mimeType: "application/json", text }]
  });
  server.resource(
    "latest-session",
    "session://latest",
    { description: "Metadata for the most recent MCP recording session", mimeType: "application/json" },
    async () => {
      const session = await sessionStore.getLatest();
      if (!session) {
        return jsonContents("session://latest", JSON.stringify({ error: "No sessions found" }));
      }
      return jsonContents("session://latest", JSON.stringify(session, null, 2));
    }
  );
  server.resource(
    "session-by-id",
    new ResourceTemplate("session://{id}", { list: void 0 }),
    { description: "Metadata for a specific MCP recording session", mimeType: "application/json" },
    async (uri, variables) => {
      const id = variables.id;
      const session = await sessionStore.get(id);
      if (!session) {
        return jsonContents(uri.href, JSON.stringify({ error: `Session not found: ${id}` }));
      }
      return jsonContents(uri.href, JSON.stringify(session, null, 2));
    }
  );
}
|
|
4015
|
+
|
|
4016
|
+
// src/mcp/server.ts
|
|
4017
|
+
var VERSION = true ? "2.5.0" : "0.0.0-dev";
// Assemble the MCP server: instantiate it, then attach every tool
// registrar plus the session resources, and hand it back to the caller.
function createServer() {
  const server = new McpServer2({
    name: "markupr",
    version: VERSION
  });
  const registrars = [
    register,
    register2,
    register3,
    register4,
    register5,
    register6,
    register7,
    register8,
    registerResources
  ];
  for (const attach of registrars) {
    attach(server);
  }
  return server;
}
|
|
4034
|
+
|
|
4035
|
+
// src/mcp/index.ts
|
|
4036
|
+
var VERSION2 = true ? "2.5.0" : "0.0.0-dev";
log(`markupr MCP server v${VERSION2} starting...`);
// Render any thrown value as a message string for the log.
const describeError = (cause) => cause instanceof Error ? cause.message : String(cause);
// Fail fast on anything unhandled: a stdio MCP server in an undefined
// state is worse than a dead one the client can restart.
process.on("uncaughtException", (error) => {
  log(`Uncaught exception: ${describeError(error)}`);
  process.exit(1);
});
process.on("unhandledRejection", (reason) => {
  log(`Unhandled rejection: ${describeError(reason)}`);
  process.exit(1);
});
try {
  const server = createServer();
  const transport = new StdioServerTransport();
  await server.connect(transport);
} catch (error) {
  log(`Failed to start MCP server: ${describeError(error)}`);
  process.exit(1);
}
|