@storyteller-platform/align 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.txt +21 -0
- package/README.md +3 -0
- package/dist/align/align.cjs +525 -0
- package/dist/align/align.d.cts +58 -0
- package/dist/align/align.d.ts +58 -0
- package/dist/align/align.js +458 -0
- package/dist/align/fuzzy.cjs +164 -0
- package/dist/align/fuzzy.d.cts +6 -0
- package/dist/align/fuzzy.d.ts +6 -0
- package/dist/align/fuzzy.js +141 -0
- package/dist/align/getSentenceRanges.cjs +304 -0
- package/dist/align/getSentenceRanges.d.cts +31 -0
- package/dist/align/getSentenceRanges.d.ts +31 -0
- package/dist/align/getSentenceRanges.js +277 -0
- package/dist/align/parse.cjs +63 -0
- package/dist/align/parse.d.cts +30 -0
- package/dist/align/parse.d.ts +30 -0
- package/dist/align/parse.js +51 -0
- package/dist/chunk-BIEQXUOY.js +50 -0
- package/dist/cli/bin.cjs +368 -0
- package/dist/cli/bin.d.cts +1 -0
- package/dist/cli/bin.d.ts +1 -0
- package/dist/cli/bin.js +319 -0
- package/dist/common/ffmpeg.cjs +232 -0
- package/dist/common/ffmpeg.d.cts +33 -0
- package/dist/common/ffmpeg.d.ts +33 -0
- package/dist/common/ffmpeg.js +196 -0
- package/dist/common/logging.cjs +45 -0
- package/dist/common/logging.d.cts +5 -0
- package/dist/common/logging.d.ts +5 -0
- package/dist/common/logging.js +12 -0
- package/dist/common/parse.cjs +73 -0
- package/dist/common/parse.d.cts +28 -0
- package/dist/common/parse.d.ts +28 -0
- package/dist/common/parse.js +56 -0
- package/dist/common/shell.cjs +30 -0
- package/dist/common/shell.d.cts +3 -0
- package/dist/common/shell.d.ts +3 -0
- package/dist/common/shell.js +7 -0
- package/dist/index.cjs +37 -0
- package/dist/index.d.cts +12 -0
- package/dist/index.d.ts +12 -0
- package/dist/index.js +11 -0
- package/dist/markup/__tests__/markup.test.cjs +464 -0
- package/dist/markup/__tests__/markup.test.d.cts +2 -0
- package/dist/markup/__tests__/markup.test.d.ts +2 -0
- package/dist/markup/__tests__/markup.test.js +441 -0
- package/dist/markup/markup.cjs +316 -0
- package/dist/markup/markup.d.cts +24 -0
- package/dist/markup/markup.d.ts +24 -0
- package/dist/markup/markup.js +254 -0
- package/dist/markup/parse.cjs +55 -0
- package/dist/markup/parse.d.cts +17 -0
- package/dist/markup/parse.d.ts +17 -0
- package/dist/markup/parse.js +43 -0
- package/dist/markup/segmentation.cjs +87 -0
- package/dist/markup/segmentation.d.cts +8 -0
- package/dist/markup/segmentation.d.ts +8 -0
- package/dist/markup/segmentation.js +67 -0
- package/dist/markup/semantics.cjs +79 -0
- package/dist/markup/semantics.d.cts +6 -0
- package/dist/markup/semantics.d.ts +6 -0
- package/dist/markup/semantics.js +53 -0
- package/dist/process/AudioEncoding.cjs +16 -0
- package/dist/process/AudioEncoding.d.cts +8 -0
- package/dist/process/AudioEncoding.d.ts +8 -0
- package/dist/process/AudioEncoding.js +0 -0
- package/dist/process/__tests__/processAudiobook.test.cjs +232 -0
- package/dist/process/__tests__/processAudiobook.test.d.cts +2 -0
- package/dist/process/__tests__/processAudiobook.test.d.ts +2 -0
- package/dist/process/__tests__/processAudiobook.test.js +209 -0
- package/dist/process/mime.cjs +43 -0
- package/dist/process/mime.d.cts +3 -0
- package/dist/process/mime.d.ts +3 -0
- package/dist/process/mime.js +24 -0
- package/dist/process/parse.cjs +84 -0
- package/dist/process/parse.d.cts +28 -0
- package/dist/process/parse.d.ts +28 -0
- package/dist/process/parse.js +73 -0
- package/dist/process/processAudiobook.cjs +220 -0
- package/dist/process/processAudiobook.d.cts +24 -0
- package/dist/process/processAudiobook.d.ts +24 -0
- package/dist/process/processAudiobook.js +166 -0
- package/dist/process/ranges.cjs +203 -0
- package/dist/process/ranges.d.cts +15 -0
- package/dist/process/ranges.d.ts +15 -0
- package/dist/process/ranges.js +137 -0
- package/dist/transcribe/parse.cjs +149 -0
- package/dist/transcribe/parse.d.cts +114 -0
- package/dist/transcribe/parse.d.ts +114 -0
- package/dist/transcribe/parse.js +143 -0
- package/dist/transcribe/transcribe.cjs +400 -0
- package/dist/transcribe/transcribe.d.cts +41 -0
- package/dist/transcribe/transcribe.d.ts +41 -0
- package/dist/transcribe/transcribe.js +330 -0
- package/package.json +96 -0
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
import {
|
|
2
|
+
__callDispose,
|
|
3
|
+
__using
|
|
4
|
+
} from "../chunk-BIEQXUOY.js";
|
|
5
|
+
import { randomUUID } from "node:crypto";
|
|
6
|
+
import { mkdir, rm } from "node:fs/promises";
|
|
7
|
+
import { tmpdir } from "node:os";
|
|
8
|
+
import { basename, dirname, extname, join } from "node:path";
|
|
9
|
+
import { detectVoiceActivity } from "@storyteller-platform/ghost-story/vad";
|
|
10
|
+
import { splitFile } from "../common/ffmpeg.js";
|
|
11
|
+
/**
 * Compute the ranges to split an audiobook track into, keeping each range at
 * or under `maxSeconds` and preferring chapter boundaries as split points.
 * Chapters that are themselves too long (and chapterless tracks) are further
 * split with VAD via getSafeRanges.
 *
 * @param {string} input - Path to the audio file the track belongs to.
 * @param {number} duration - Total track duration, in seconds.
 * @param {Array<{filename: string, start?: number}>} chapters - Chapter list;
 *   when empty the whole track is split purely by voice-activity detection.
 * @param {number} maxSeconds - Maximum length of a single range, in seconds.
 * @param {AbortSignal} [signal] - Optional cancellation signal.
 * @param {{info(msg: string): void}} [logger] - Optional logger.
 * @returns {Promise<Array<{filepath: string, start: number, end: number}>>}
 */
async function getSafeChapterRanges(input, duration, chapters, maxSeconds, signal, logger) {
  // No chapter metadata: fall back to VAD-based splitting over the whole track.
  if (!chapters.length) {
    logger?.info(
      `Track is longer than ${maxSeconds / 60} minutes (${duration / 60}m); using VAD to determine safe split points.`
    );
    // Fix: forward the logger so this path logs like the per-chapter path does.
    const vadRanges = await getSafeRanges(input, duration, maxSeconds, 0, signal, logger);
    return vadRanges.map((r) => ({ filepath: input, ...r }));
  }
  // One candidate range per chapter. A chapter ends where the next chapter in
  // the same file starts; the last chapter, or one followed by a chapter in a
  // different file, ends at the track duration.
  const initialRanges = chapters.map((chapter, index) => {
    const next = chapters[index + 1];
    const end =
      next && next.filename === chapter.filename ? next.start ?? duration : duration;
    return {
      filepath: chapter.filename,
      start: chapter.start ?? 0,
      end
    };
  });
  const ranges = [];
  for (const range of initialRanges) {
    const chapterDuration = range.end - range.start;
    if (chapterDuration <= maxSeconds) {
      ranges.push(range);
      continue;
    }
    // Fix: report the chapter's own duration, not the whole track's duration.
    logger?.info(
      `Chapter is longer than ${maxSeconds / 60} minutes (${chapterDuration / 60}m); using VAD to determine safe split points.`
    );
    const chapterRanges = await getSafeRanges(
      range.filepath,
      chapterDuration,
      maxSeconds,
      range.start,
      signal,
      logger
    );
    ranges.push(
      ...chapterRanges.map((r) => ({ filepath: range.filepath, ...r }))
    );
  }
  return ranges;
}
|
|
58
|
+
/**
 * Split the span [start, start + duration) of `input` into ranges no longer
 * than `maxSeconds`, cutting at the most likely sentence break (the longest
 * detected silence) within the 120 seconds before each approximate cut point.
 *
 * @param {string} input - Path to the audio file to analyze.
 * @param {number} duration - Length of the span to split, in seconds.
 * @param {number} maxSeconds - Maximum length of a single range, in seconds.
 * @param {number} [start=0] - Absolute offset of the span within the file.
 * @param {AbortSignal} [signal] - Optional cancellation signal.
 * @param {object} [logger] - Optional logger, forwarded to splitFile.
 * @returns {Promise<Array<{start: number, end: number}>>} absolute-time ranges.
 */
async function getSafeRanges(input, duration, maxSeconds, start = 0, signal, logger) {
  // `_stack` / `__using` / `__callDispose` are the bundler's lowering of
  // `await using`: the AsyncDisposableStack is disposed in the finally block
  // and any caught error is rethrown by __callDispose after disposal.
  var _stack = [];
  try {
    const ext = extname(input);
    const rawFilename = basename(input, ext);
    // Scratch directory for the extracted search-window WAVs; removed on exit
    // by the deferred cleanup registered below.
    const tmpDir = join(tmpdir(), `storyteller-align-silence-${randomUUID()}`);
    const stack = __using(_stack, new AsyncDisposableStack(), true);
    stack.defer(async () => {
      await rm(tmpDir, { recursive: true, force: true });
    });
    // Start with one range covering the whole span; each loop iteration trims
    // the last range at a silence and appends the remainder as a new range.
    const ranges = [{ start, end: duration + start }];
    // One cut per additional maxSeconds chunk, i.e. ceil(duration/maxSeconds) - 1 cuts.
    for (let i = 0; i + 1 < duration / maxSeconds; i++) {
      if (signal == null ? void 0 : signal.aborted) throw new Error("Aborted");
      const tmpFilepath = join(tmpDir, i.toString(), `${rawFilename}.wav`);
      await mkdir(dirname(tmpFilepath), { recursive: true });
      // Search for silence in the 120 seconds leading up to the ideal cut.
      // NOTE(review): searchStart goes negative when maxSeconds < 120 —
      // presumably callers always pass much larger values; confirm.
      const approxCutPoint = start + maxSeconds * (i + 1);
      const searchStart = approxCutPoint - 120;
      const searchEnd = approxCutPoint;
      await splitFile(
        input,
        tmpFilepath,
        searchStart,
        searchEnd,
        {},
        signal,
        logger
      );
      const vadTimeline = await detectVoiceActivity(tmpFilepath, {
        engine: "active-gate-og"
      });
      // No speech detected in the window: keep the (oversized) last range
      // rather than cutting blindly.
      if (vadTimeline.length === 0) {
        break;
      }
      // Invert voice-activity intervals into silence intervals, shifted to
      // absolute file time: each speech entry closes the previous silence
      // (end = speech start) and opens a new, initially empty one at its end.
      // Any silence before the first speech entry is ignored.
      const silenceTimeline = vadTimeline.reduce((acc, entry) => {
        const lastEntry = acc.at(-1);
        if (!lastEntry) {
          return [
            ...acc,
            {
              start: entry.endTime + searchStart,
              end: entry.endTime + searchStart
            }
          ];
        }
        return [
          ...acc.slice(0, -1),
          {
            ...lastEntry,
            end: entry.startTime + searchStart
          },
          {
            start: entry.endTime + searchStart,
            end: entry.endTime + searchStart
          }
        ];
      }, []);
      // The longest silence is the most likely sentence break; the non-strict
      // comparison means ties favor the later silence.
      const nearestLikelySentenceBreak = silenceTimeline.reduce((acc, entry) => {
        const currLength = acc.end - acc.start;
        const entryLength = entry.end - entry.start;
        return currLength > entryLength ? acc : entry;
      });
      // Cut here: the current last range ends where the silence starts, and
      // the remainder (silence end through span end) becomes the new last range.
      const lastRange = ranges.at(-1);
      lastRange.end = nearestLikelySentenceBreak.start;
      ranges.push({
        start: nearestLikelySentenceBreak.end,
        end: duration + start
      });
    }
    return ranges;
  } catch (_) {
    // Generated `using` lowering: record the error; __callDispose rethrows it
    // after async disposal completes, so failures are not swallowed.
    var _error = _, _hasError = true;
  } finally {
    var _promise = __callDispose(_stack, _error, _hasError);
    _promise && await _promise;
  }
}
|
|
134
|
+
export {
|
|
135
|
+
getSafeChapterRanges,
|
|
136
|
+
getSafeRanges
|
|
137
|
+
};
|
|
@@ -0,0 +1,149 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
var parse_exports = {};
|
|
20
|
+
__export(parse_exports, {
|
|
21
|
+
transcribeCommand: () => transcribeCommand,
|
|
22
|
+
transcribeParser: () => transcribeParser
|
|
23
|
+
});
|
|
24
|
+
module.exports = __toCommonJS(parse_exports);
|
|
25
|
+
var import_core = require("@optique/core");
|
|
26
|
+
var import_valueparser = require("@optique/core/valueparser");
|
|
27
|
+
var import_valueparser2 = require("@optique/run/valueparser");
|
|
28
|
+
var import_ghost_story = require("@storyteller-platform/ghost-story");
|
|
29
|
+
var import_parse = require("../common/parse.cjs");
|
|
30
|
+
// CLI options for selecting and configuring a transcription engine. `or`
// makes the engine variants mutually exclusive, each discriminated by its
// `--engine`/`-e` value. The `(0, fn)(...)` comma pattern is bundler output
// that calls the imported helper without a `this` binding.
const transcribeParser = (0, import_core.or)(
  // Local whisper.cpp binary.
  (0, import_core.object)("whisper.cpp", {
    engine: (0, import_core.option)(
      "--engine",
      "-e",
      (0, import_core.choice)(["whisper.cpp"], { metavar: "whisper.cpp" })
    ),
    // Model name restricted to the known WHISPER_MODELS list; defaults to tiny.en.
    model: (0, import_core.withDefault)(
      (0, import_core.option)("--model", "-m", (0, import_core.choice)(import_ghost_story.WHISPER_MODELS, { metavar: "MODEL" }), {
        description: import_core.message`The whisper model to use`
      }),
      "tiny.en"
    ),
    threads: (0, import_core.withDefault)((0, import_core.option)("--threads", (0, import_core.integer)()), 4),
    // Defaults to 1 because higher values can hurt timestamp accuracy.
    processors: (0, import_core.withDefault)(
      (0, import_core.option)("--processors", (0, import_core.integer)(), {
        description: import_core.message`The number of processors to use (values greater than 1 may affect timing accuracy)`
      }),
      1
    ),
    cpuOverride: (0, import_core.optional)(
      (0, import_core.option)(
        "--cpu-override",
        (0, import_core.choice)(["blas", "cpu"], { metavar: "CPU_ENGINE" }),
        {
          description: import_core.message`When provided, will use this whisper variant even if another is available`
        }
      )
    )
  }),
  // Remote whisper server; URL is normalized to a string.
  (0, import_core.object)("whisper-server", {
    engine: (0, import_core.option)(
      "--engine",
      "-e",
      (0, import_core.choice)(["whisper-server"], { metavar: "whisper-server" })
    ),
    whisperServerUrl: (0, import_core.map)(
      (0, import_core.option)("--whisper-server-url", (0, import_valueparser.url)()),
      (url2) => url2.toString()
    ),
    whisperServerApiKey: (0, import_core.optional)((0, import_core.option)("--whisper-server-api-key", (0, import_valueparser.string)()))
  }),
  // OpenAI-hosted transcription; all credentials/settings optional here
  // (presumably resolved from the environment elsewhere — not shown in this file).
  (0, import_core.object)("openai-cloud", {
    engine: (0, import_core.option)(
      "--engine",
      "-e",
      (0, import_core.choice)(["openai-cloud"], { metavar: "openai-cloud" })
    ),
    openaiModelName: (0, import_core.optional)((0, import_core.option)("--openai-model", (0, import_valueparser.string)())),
    openAiApiKey: (0, import_core.optional)((0, import_core.option)("--openai-api-key", (0, import_valueparser.string)())),
    openAiOrganization: (0, import_core.optional)((0, import_core.option)("--openai-organization", (0, import_valueparser.string)())),
    openAiBaseUrl: (0, import_core.optional)(
      (0, import_core.map)((0, import_core.option)("--openai-base-url", (0, import_valueparser.url)()), (url2) => url2.toString())
    )
  }),
  // Google Cloud Speech-to-Text; API key is required.
  (0, import_core.object)("google-cloud", {
    engine: (0, import_core.option)(
      "--engine",
      "-e",
      (0, import_core.choice)(["google-cloud"], { metavar: "google-cloud" })
    ),
    googleCloudApiKey: (0, import_core.option)("--google-cloud-api-key", (0, import_valueparser.string)())
  }),
  // Azure Speech; region and subscription key are both required.
  (0, import_core.object)("microsoft-azure", {
    engine: (0, import_core.option)(
      "--engine",
      "-e",
      (0, import_core.choice)(["microsoft-azure"], { metavar: "microsoft-azure" })
    ),
    azureServiceRegion: (0, import_core.option)("--azure-service-region", (0, import_valueparser.string)()),
    azureSubscriptionKey: (0, import_core.option)("--azure-subscription-key", (0, import_valueparser.string)())
  }),
  // Amazon Transcribe; region plus access-key credentials are required.
  (0, import_core.object)("amazon-transcribe", {
    engine: (0, import_core.option)(
      "--engine",
      "-e",
      (0, import_core.choice)(["amazon-transcribe"], { metavar: "amazon-transcribe" })
    ),
    amazonTranscribeRegion: (0, import_core.option)("--amazon-transcribe-region", (0, import_valueparser.string)()),
    amazonTranscribeAccessKeyId: (0, import_core.option)(
      "--amazon-transcribe-access-key-id",
      (0, import_valueparser.string)()
    ),
    amazonTranscribeSecretAccessKey: (0, import_core.option)(
      "--amazon-transcribe-secret-access-key",
      (0, import_valueparser.string)()
    )
  }),
  // Deepgram; API key required, model defaults to nova-3.
  (0, import_core.object)("deepgram", {
    engine: (0, import_core.option)(
      "--engine",
      "-e",
      (0, import_core.choice)(["deepgram"], { metavar: "deepgram" })
    ),
    deepgramApiKey: (0, import_core.option)("--deepgram-api-key", (0, import_valueparser.string)()),
    deepgramModel: (0, import_core.withDefault)((0, import_core.option)("--deepgram-model", (0, import_valueparser.string)()), "nova-3")
  })
);
|
|
128
|
+
// The `transcribe` CLI subcommand: positional INPUT (must be an existing
// directory) and OUTPUT (directory) arguments, merged with the shared
// parallelism, language, and logging parsers and the per-engine options
// defined by transcribeParser above.
const transcribeCommand = (0, import_core.command)(
  "transcribe",
  (0, import_core.merge)(
    (0, import_core.object)({
      // Constant discriminant so the CLI dispatcher can route to this action.
      action: (0, import_core.constant)("transcribe"),
      input: (0, import_core.argument)(
        (0, import_valueparser2.path)({ type: "directory", mustExist: true, metavar: "INPUT" })
      ),
      output: (0, import_core.argument)((0, import_valueparser2.path)({ metavar: "OUTPUT", type: "directory" }))
    }),
    import_parse.parallelismParser,
    import_parse.languageParser,
    transcribeParser,
    import_parse.loggingParser
  ),
  { description: import_core.message`Transcribe a directory of audiobook files.` }
);
// Annotate the CommonJS export names for ESM import in node:
// (dead code — the `0 &&` guard means it never executes; static metadata only)
0 && (module.exports = {
  transcribeCommand,
  transcribeParser
});
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
// Generated type declarations (.d.cts) for the transcribe CLI parsers.
import * as _optique_core from '@optique/core';

// Per-engine option bundles, discriminated by the `engine` literal; the second
// type argument describes the parser's internal tagged intermediate state.
declare const transcribeParser: _optique_core.Parser<"sync", {
  readonly engine: "whisper.cpp";
  readonly model: "tiny" | "tiny.en" | "tiny-q5_1" | "tiny.en-q5_1" | "tiny-q8_0" | "base" | "base.en" | "base-q5_1" | "base.en-q5_1" | "base-q8_0" | "small" | "small.en" | "small-q5_1" | "small.en-q5_1" | "small-q8_0" | "medium" | "medium.en" | "medium-q5_0" | "medium.en-q5_0" | "medium-q8_0" | "large-v1" | "large-v2" | "large-v2-q5_0" | "large-v2-q8_0" | "large-v3" | "large-v3-q5_0" | "large-v3-turbo" | "large-v3-turbo-q5_0" | "large-v3-turbo-q8_0";
  readonly threads: number;
  readonly processors: number;
  readonly cpuOverride: "blas" | "cpu" | undefined;
} | {
  readonly engine: "whisper-server";
  readonly whisperServerUrl: string;
  readonly whisperServerApiKey: string | undefined;
} | {
  readonly engine: "openai-cloud";
  readonly openaiModelName: string | undefined;
  readonly openAiApiKey: string | undefined;
  readonly openAiOrganization: string | undefined;
  readonly openAiBaseUrl: string | undefined;
} | {
  readonly engine: "google-cloud";
  readonly googleCloudApiKey: string;
} | {
  readonly engine: "microsoft-azure";
  readonly azureServiceRegion: string;
  readonly azureSubscriptionKey: string;
} | {
  readonly engine: "amazon-transcribe";
  readonly amazonTranscribeRegion: string;
  readonly amazonTranscribeAccessKeyId: string;
  readonly amazonTranscribeSecretAccessKey: string;
} | {
  readonly engine: "deepgram";
  readonly deepgramApiKey: string;
  readonly deepgramModel: string;
}, [0, _optique_core.ParserResult<{
  readonly engine: _optique_core.ValueParserResult<"whisper.cpp"> | undefined;
  readonly model: [_optique_core.ValueParserResult<"tiny" | "tiny.en" | "tiny-q5_1" | "tiny.en-q5_1" | "tiny-q8_0" | "base" | "base.en" | "base-q5_1" | "base.en-q5_1" | "base-q8_0" | "small" | "small.en" | "small-q5_1" | "small.en-q5_1" | "small-q8_0" | "medium" | "medium.en" | "medium-q5_0" | "medium.en-q5_0" | "medium-q8_0" | "large-v1" | "large-v2" | "large-v2-q5_0" | "large-v2-q8_0" | "large-v3" | "large-v3-q5_0" | "large-v3-turbo" | "large-v3-turbo-q5_0" | "large-v3-turbo-q8_0"> | undefined] | undefined;
  readonly threads: [_optique_core.ValueParserResult<number> | undefined] | undefined;
  readonly processors: [_optique_core.ValueParserResult<number> | undefined] | undefined;
  readonly cpuOverride: [_optique_core.ValueParserResult<"blas" | "cpu"> | undefined] | undefined;
}>] | [1, _optique_core.ParserResult<{
  readonly engine: _optique_core.ValueParserResult<"whisper-server"> | undefined;
  readonly whisperServerUrl: _optique_core.ValueParserResult<URL> | undefined;
  readonly whisperServerApiKey: [_optique_core.ValueParserResult<string> | undefined] | undefined;
}>] | [2, _optique_core.ParserResult<{
  readonly engine: _optique_core.ValueParserResult<"openai-cloud"> | undefined;
  readonly openaiModelName: [_optique_core.ValueParserResult<string> | undefined] | undefined;
  readonly openAiApiKey: [_optique_core.ValueParserResult<string> | undefined] | undefined;
  readonly openAiOrganization: [_optique_core.ValueParserResult<string> | undefined] | undefined;
  readonly openAiBaseUrl: [_optique_core.ValueParserResult<URL> | undefined] | undefined;
}>] | [3, _optique_core.ParserResult<{
  readonly engine: _optique_core.ValueParserResult<"google-cloud"> | undefined;
  readonly googleCloudApiKey: _optique_core.ValueParserResult<string> | undefined;
}>] | [4, _optique_core.ParserResult<{
  readonly engine: _optique_core.ValueParserResult<"microsoft-azure"> | undefined;
  readonly azureServiceRegion: _optique_core.ValueParserResult<string> | undefined;
  readonly azureSubscriptionKey: _optique_core.ValueParserResult<string> | undefined;
}>] | [5, _optique_core.ParserResult<{
  readonly engine: _optique_core.ValueParserResult<"amazon-transcribe"> | undefined;
  readonly amazonTranscribeRegion: _optique_core.ValueParserResult<string> | undefined;
  readonly amazonTranscribeAccessKeyId: _optique_core.ValueParserResult<string> | undefined;
  readonly amazonTranscribeSecretAccessKey: _optique_core.ValueParserResult<string> | undefined;
}>] | [6, _optique_core.ParserResult<{
  readonly engine: _optique_core.ValueParserResult<"deepgram"> | undefined;
  readonly deepgramApiKey: _optique_core.ValueParserResult<string> | undefined;
  readonly deepgramModel: [_optique_core.ValueParserResult<string> | undefined] | undefined;
}>] | undefined>;
// Full `transcribe` subcommand result: positional arguments intersected with
// the shared parallelism/language/logging options and the engine union above.
declare const transcribeCommand: _optique_core.Parser<"sync", {
  readonly action: "transcribe";
  readonly input: string;
  readonly output: string;
} & {
  readonly parallelism: number;
} & ({
  readonly language: Intl.Locale | undefined;
} & (({
  readonly engine: "whisper.cpp";
  readonly model: "tiny" | "tiny.en" | "tiny-q5_1" | "tiny.en-q5_1" | "tiny-q8_0" | "base" | "base.en" | "base-q5_1" | "base.en-q5_1" | "base-q8_0" | "small" | "small.en" | "small-q5_1" | "small.en-q5_1" | "small-q8_0" | "medium" | "medium.en" | "medium-q5_0" | "medium.en-q5_0" | "medium-q8_0" | "large-v1" | "large-v2" | "large-v2-q5_0" | "large-v2-q8_0" | "large-v3" | "large-v3-q5_0" | "large-v3-turbo" | "large-v3-turbo-q5_0" | "large-v3-turbo-q8_0";
  readonly threads: number;
  readonly processors: number;
  readonly cpuOverride: "blas" | "cpu" | undefined;
} | {
  readonly engine: "whisper-server";
  readonly whisperServerUrl: string;
  readonly whisperServerApiKey: string | undefined;
} | {
  readonly engine: "openai-cloud";
  readonly openaiModelName: string | undefined;
  readonly openAiApiKey: string | undefined;
  readonly openAiOrganization: string | undefined;
  readonly openAiBaseUrl: string | undefined;
} | {
  readonly engine: "google-cloud";
  readonly googleCloudApiKey: string;
} | {
  readonly engine: "microsoft-azure";
  readonly azureServiceRegion: string;
  readonly azureSubscriptionKey: string;
} | {
  readonly engine: "amazon-transcribe";
  readonly amazonTranscribeRegion: string;
  readonly amazonTranscribeAccessKeyId: string;
  readonly amazonTranscribeSecretAccessKey: string;
} | {
  readonly engine: "deepgram";
  readonly deepgramApiKey: string;
  readonly deepgramModel: string;
}) & {
  readonly noProgress: boolean;
  readonly logLevel: "silent" | "debug" | "info" | "warn" | "error";
  readonly time: boolean;
})), ["matched", string] | ["parsing", Record<string | symbol, unknown>] | undefined>;

export { transcribeCommand, transcribeParser };
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
// Generated type declarations (.d.ts) for the transcribe CLI parsers.
// Identical to the sibling .d.cts file; kept in sync by the build.
import * as _optique_core from '@optique/core';

// Per-engine option bundles, discriminated by the `engine` literal; the second
// type argument describes the parser's internal tagged intermediate state.
declare const transcribeParser: _optique_core.Parser<"sync", {
  readonly engine: "whisper.cpp";
  readonly model: "tiny" | "tiny.en" | "tiny-q5_1" | "tiny.en-q5_1" | "tiny-q8_0" | "base" | "base.en" | "base-q5_1" | "base.en-q5_1" | "base-q8_0" | "small" | "small.en" | "small-q5_1" | "small.en-q5_1" | "small-q8_0" | "medium" | "medium.en" | "medium-q5_0" | "medium.en-q5_0" | "medium-q8_0" | "large-v1" | "large-v2" | "large-v2-q5_0" | "large-v2-q8_0" | "large-v3" | "large-v3-q5_0" | "large-v3-turbo" | "large-v3-turbo-q5_0" | "large-v3-turbo-q8_0";
  readonly threads: number;
  readonly processors: number;
  readonly cpuOverride: "blas" | "cpu" | undefined;
} | {
  readonly engine: "whisper-server";
  readonly whisperServerUrl: string;
  readonly whisperServerApiKey: string | undefined;
} | {
  readonly engine: "openai-cloud";
  readonly openaiModelName: string | undefined;
  readonly openAiApiKey: string | undefined;
  readonly openAiOrganization: string | undefined;
  readonly openAiBaseUrl: string | undefined;
} | {
  readonly engine: "google-cloud";
  readonly googleCloudApiKey: string;
} | {
  readonly engine: "microsoft-azure";
  readonly azureServiceRegion: string;
  readonly azureSubscriptionKey: string;
} | {
  readonly engine: "amazon-transcribe";
  readonly amazonTranscribeRegion: string;
  readonly amazonTranscribeAccessKeyId: string;
  readonly amazonTranscribeSecretAccessKey: string;
} | {
  readonly engine: "deepgram";
  readonly deepgramApiKey: string;
  readonly deepgramModel: string;
}, [0, _optique_core.ParserResult<{
  readonly engine: _optique_core.ValueParserResult<"whisper.cpp"> | undefined;
  readonly model: [_optique_core.ValueParserResult<"tiny" | "tiny.en" | "tiny-q5_1" | "tiny.en-q5_1" | "tiny-q8_0" | "base" | "base.en" | "base-q5_1" | "base.en-q5_1" | "base-q8_0" | "small" | "small.en" | "small-q5_1" | "small.en-q5_1" | "small-q8_0" | "medium" | "medium.en" | "medium-q5_0" | "medium.en-q5_0" | "medium-q8_0" | "large-v1" | "large-v2" | "large-v2-q5_0" | "large-v2-q8_0" | "large-v3" | "large-v3-q5_0" | "large-v3-turbo" | "large-v3-turbo-q5_0" | "large-v3-turbo-q8_0"> | undefined] | undefined;
  readonly threads: [_optique_core.ValueParserResult<number> | undefined] | undefined;
  readonly processors: [_optique_core.ValueParserResult<number> | undefined] | undefined;
  readonly cpuOverride: [_optique_core.ValueParserResult<"blas" | "cpu"> | undefined] | undefined;
}>] | [1, _optique_core.ParserResult<{
  readonly engine: _optique_core.ValueParserResult<"whisper-server"> | undefined;
  readonly whisperServerUrl: _optique_core.ValueParserResult<URL> | undefined;
  readonly whisperServerApiKey: [_optique_core.ValueParserResult<string> | undefined] | undefined;
}>] | [2, _optique_core.ParserResult<{
  readonly engine: _optique_core.ValueParserResult<"openai-cloud"> | undefined;
  readonly openaiModelName: [_optique_core.ValueParserResult<string> | undefined] | undefined;
  readonly openAiApiKey: [_optique_core.ValueParserResult<string> | undefined] | undefined;
  readonly openAiOrganization: [_optique_core.ValueParserResult<string> | undefined] | undefined;
  readonly openAiBaseUrl: [_optique_core.ValueParserResult<URL> | undefined] | undefined;
}>] | [3, _optique_core.ParserResult<{
  readonly engine: _optique_core.ValueParserResult<"google-cloud"> | undefined;
  readonly googleCloudApiKey: _optique_core.ValueParserResult<string> | undefined;
}>] | [4, _optique_core.ParserResult<{
  readonly engine: _optique_core.ValueParserResult<"microsoft-azure"> | undefined;
  readonly azureServiceRegion: _optique_core.ValueParserResult<string> | undefined;
  readonly azureSubscriptionKey: _optique_core.ValueParserResult<string> | undefined;
}>] | [5, _optique_core.ParserResult<{
  readonly engine: _optique_core.ValueParserResult<"amazon-transcribe"> | undefined;
  readonly amazonTranscribeRegion: _optique_core.ValueParserResult<string> | undefined;
  readonly amazonTranscribeAccessKeyId: _optique_core.ValueParserResult<string> | undefined;
  readonly amazonTranscribeSecretAccessKey: _optique_core.ValueParserResult<string> | undefined;
}>] | [6, _optique_core.ParserResult<{
  readonly engine: _optique_core.ValueParserResult<"deepgram"> | undefined;
  readonly deepgramApiKey: _optique_core.ValueParserResult<string> | undefined;
  readonly deepgramModel: [_optique_core.ValueParserResult<string> | undefined] | undefined;
}>] | undefined>;
// Full `transcribe` subcommand result: positional arguments intersected with
// the shared parallelism/language/logging options and the engine union above.
declare const transcribeCommand: _optique_core.Parser<"sync", {
  readonly action: "transcribe";
  readonly input: string;
  readonly output: string;
} & {
  readonly parallelism: number;
} & ({
  readonly language: Intl.Locale | undefined;
} & (({
  readonly engine: "whisper.cpp";
  readonly model: "tiny" | "tiny.en" | "tiny-q5_1" | "tiny.en-q5_1" | "tiny-q8_0" | "base" | "base.en" | "base-q5_1" | "base.en-q5_1" | "base-q8_0" | "small" | "small.en" | "small-q5_1" | "small.en-q5_1" | "small-q8_0" | "medium" | "medium.en" | "medium-q5_0" | "medium.en-q5_0" | "medium-q8_0" | "large-v1" | "large-v2" | "large-v2-q5_0" | "large-v2-q8_0" | "large-v3" | "large-v3-q5_0" | "large-v3-turbo" | "large-v3-turbo-q5_0" | "large-v3-turbo-q8_0";
  readonly threads: number;
  readonly processors: number;
  readonly cpuOverride: "blas" | "cpu" | undefined;
} | {
  readonly engine: "whisper-server";
  readonly whisperServerUrl: string;
  readonly whisperServerApiKey: string | undefined;
} | {
  readonly engine: "openai-cloud";
  readonly openaiModelName: string | undefined;
  readonly openAiApiKey: string | undefined;
  readonly openAiOrganization: string | undefined;
  readonly openAiBaseUrl: string | undefined;
} | {
  readonly engine: "google-cloud";
  readonly googleCloudApiKey: string;
} | {
  readonly engine: "microsoft-azure";
  readonly azureServiceRegion: string;
  readonly azureSubscriptionKey: string;
} | {
  readonly engine: "amazon-transcribe";
  readonly amazonTranscribeRegion: string;
  readonly amazonTranscribeAccessKeyId: string;
  readonly amazonTranscribeSecretAccessKey: string;
} | {
  readonly engine: "deepgram";
  readonly deepgramApiKey: string;
  readonly deepgramModel: string;
}) & {
  readonly noProgress: boolean;
  readonly logLevel: "silent" | "debug" | "info" | "warn" | "error";
  readonly time: boolean;
})), ["matched", string] | ["parsing", Record<string | symbol, unknown>] | undefined>;

export { transcribeCommand, transcribeParser };
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
import "../chunk-BIEQXUOY.js";
|
|
2
|
+
import {
|
|
3
|
+
argument,
|
|
4
|
+
choice,
|
|
5
|
+
command,
|
|
6
|
+
constant,
|
|
7
|
+
integer,
|
|
8
|
+
map,
|
|
9
|
+
merge,
|
|
10
|
+
message,
|
|
11
|
+
object,
|
|
12
|
+
option,
|
|
13
|
+
optional,
|
|
14
|
+
or,
|
|
15
|
+
withDefault
|
|
16
|
+
} from "@optique/core";
|
|
17
|
+
import { string, url } from "@optique/core/valueparser";
|
|
18
|
+
import { path } from "@optique/run/valueparser";
|
|
19
|
+
import { WHISPER_MODELS } from "@storyteller-platform/ghost-story";
|
|
20
|
+
import {
|
|
21
|
+
languageParser,
|
|
22
|
+
loggingParser,
|
|
23
|
+
parallelismParser
|
|
24
|
+
} from "../common/parse.js";
|
|
25
|
+
/**
 * Parser for the transcription-engine selection and its engine-specific flags.
 *
 * Each engine is chosen with `--engine NAME` (`-e`) and contributes its own
 * set of options; `or(...)` picks exactly one variant. Documented defaults:
 * whisper.cpp model "tiny.en", 4 threads, 1 processor; deepgram model "nova-3".
 */

// Every variant repeats the same `--engine` option, differing only in the
// accepted value (which doubles as the metavar) — build it once.
const engineFlag = (name) =>
  option("--engine", "-e", choice([name], { metavar: name }));

// A URL-valued option, normalized to its string form.
const urlOption = (flag) => map(option(flag, url()), (value) => value.toString());

const whisperCppParser = object("whisper.cpp", {
  engine: engineFlag("whisper.cpp"),
  model: withDefault(
    option("--model", "-m", choice(WHISPER_MODELS, { metavar: "MODEL" }), {
      description: message`The whisper model to use`
    }),
    "tiny.en"
  ),
  threads: withDefault(option("--threads", integer()), 4),
  processors: withDefault(
    option("--processors", integer(), {
      description: message`The number of processors to use (values greater than 1 may affect timing accuracy)`
    }),
    1
  ),
  cpuOverride: optional(
    option("--cpu-override", choice(["blas", "cpu"], { metavar: "CPU_ENGINE" }), {
      description: message`When provided, will use this whisper variant even if another is available`
    })
  )
});

const whisperServerParser = object("whisper-server", {
  engine: engineFlag("whisper-server"),
  whisperServerUrl: urlOption("--whisper-server-url"),
  whisperServerApiKey: optional(option("--whisper-server-api-key", string()))
});

const openaiCloudParser = object("openai-cloud", {
  engine: engineFlag("openai-cloud"),
  openaiModelName: optional(option("--openai-model", string())),
  openAiApiKey: optional(option("--openai-api-key", string())),
  openAiOrganization: optional(option("--openai-organization", string())),
  openAiBaseUrl: optional(urlOption("--openai-base-url"))
});

const googleCloudParser = object("google-cloud", {
  engine: engineFlag("google-cloud"),
  googleCloudApiKey: option("--google-cloud-api-key", string())
});

const microsoftAzureParser = object("microsoft-azure", {
  engine: engineFlag("microsoft-azure"),
  azureServiceRegion: option("--azure-service-region", string()),
  azureSubscriptionKey: option("--azure-subscription-key", string())
});

const amazonTranscribeParser = object("amazon-transcribe", {
  engine: engineFlag("amazon-transcribe"),
  amazonTranscribeRegion: option("--amazon-transcribe-region", string()),
  amazonTranscribeAccessKeyId: option("--amazon-transcribe-access-key-id", string()),
  amazonTranscribeSecretAccessKey: option("--amazon-transcribe-secret-access-key", string())
});

const deepgramParser = object("deepgram", {
  engine: engineFlag("deepgram"),
  deepgramApiKey: option("--deepgram-api-key", string()),
  deepgramModel: withDefault(option("--deepgram-model", string()), "nova-3")
});

// Variant order preserved: `or` tries alternatives in this order.
const transcribeParser = or(
  whisperCppParser,
  whisperServerParser,
  openaiCloudParser,
  googleCloudParser,
  microsoftAzureParser,
  amazonTranscribeParser,
  deepgramParser
);
|
|
123
|
+
/**
 * The `transcribe INPUT OUTPUT` CLI command: merges the positional arguments
 * and action tag with the shared parallelism, language, engine-selection, and
 * logging parsers.
 */

// Action constant plus the two positional directory arguments. Order matters:
// INPUT is consumed before OUTPUT.
const transcribePositionals = object({
  action: constant("transcribe"),
  input: argument(
    path({ type: "directory", mustExist: true, metavar: "INPUT" })
  ),
  output: argument(path({ metavar: "OUTPUT", type: "directory" }))
});

const transcribeCommand = command(
  "transcribe",
  // `merge` argument order preserved from the original definition.
  merge(
    transcribePositionals,
    parallelismParser,
    languageParser,
    transcribeParser,
    loggingParser
  ),
  { description: message`Transcribe a directory of audiobook files.` }
);
|
|
140
|
+
export {
|
|
141
|
+
transcribeCommand,
|
|
142
|
+
transcribeParser
|
|
143
|
+
};
|