@storyteller-platform/align 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.txt +21 -0
- package/README.md +3 -0
- package/dist/align/align.cjs +525 -0
- package/dist/align/align.d.cts +58 -0
- package/dist/align/align.d.ts +58 -0
- package/dist/align/align.js +458 -0
- package/dist/align/fuzzy.cjs +164 -0
- package/dist/align/fuzzy.d.cts +6 -0
- package/dist/align/fuzzy.d.ts +6 -0
- package/dist/align/fuzzy.js +141 -0
- package/dist/align/getSentenceRanges.cjs +304 -0
- package/dist/align/getSentenceRanges.d.cts +31 -0
- package/dist/align/getSentenceRanges.d.ts +31 -0
- package/dist/align/getSentenceRanges.js +277 -0
- package/dist/align/parse.cjs +63 -0
- package/dist/align/parse.d.cts +30 -0
- package/dist/align/parse.d.ts +30 -0
- package/dist/align/parse.js +51 -0
- package/dist/chunk-BIEQXUOY.js +50 -0
- package/dist/cli/bin.cjs +368 -0
- package/dist/cli/bin.d.cts +1 -0
- package/dist/cli/bin.d.ts +1 -0
- package/dist/cli/bin.js +319 -0
- package/dist/common/ffmpeg.cjs +232 -0
- package/dist/common/ffmpeg.d.cts +33 -0
- package/dist/common/ffmpeg.d.ts +33 -0
- package/dist/common/ffmpeg.js +196 -0
- package/dist/common/logging.cjs +45 -0
- package/dist/common/logging.d.cts +5 -0
- package/dist/common/logging.d.ts +5 -0
- package/dist/common/logging.js +12 -0
- package/dist/common/parse.cjs +73 -0
- package/dist/common/parse.d.cts +28 -0
- package/dist/common/parse.d.ts +28 -0
- package/dist/common/parse.js +56 -0
- package/dist/common/shell.cjs +30 -0
- package/dist/common/shell.d.cts +3 -0
- package/dist/common/shell.d.ts +3 -0
- package/dist/common/shell.js +7 -0
- package/dist/index.cjs +37 -0
- package/dist/index.d.cts +12 -0
- package/dist/index.d.ts +12 -0
- package/dist/index.js +11 -0
- package/dist/markup/__tests__/markup.test.cjs +464 -0
- package/dist/markup/__tests__/markup.test.d.cts +2 -0
- package/dist/markup/__tests__/markup.test.d.ts +2 -0
- package/dist/markup/__tests__/markup.test.js +441 -0
- package/dist/markup/markup.cjs +316 -0
- package/dist/markup/markup.d.cts +24 -0
- package/dist/markup/markup.d.ts +24 -0
- package/dist/markup/markup.js +254 -0
- package/dist/markup/parse.cjs +55 -0
- package/dist/markup/parse.d.cts +17 -0
- package/dist/markup/parse.d.ts +17 -0
- package/dist/markup/parse.js +43 -0
- package/dist/markup/segmentation.cjs +87 -0
- package/dist/markup/segmentation.d.cts +8 -0
- package/dist/markup/segmentation.d.ts +8 -0
- package/dist/markup/segmentation.js +67 -0
- package/dist/markup/semantics.cjs +79 -0
- package/dist/markup/semantics.d.cts +6 -0
- package/dist/markup/semantics.d.ts +6 -0
- package/dist/markup/semantics.js +53 -0
- package/dist/process/AudioEncoding.cjs +16 -0
- package/dist/process/AudioEncoding.d.cts +8 -0
- package/dist/process/AudioEncoding.d.ts +8 -0
- package/dist/process/AudioEncoding.js +0 -0
- package/dist/process/__tests__/processAudiobook.test.cjs +232 -0
- package/dist/process/__tests__/processAudiobook.test.d.cts +2 -0
- package/dist/process/__tests__/processAudiobook.test.d.ts +2 -0
- package/dist/process/__tests__/processAudiobook.test.js +209 -0
- package/dist/process/mime.cjs +43 -0
- package/dist/process/mime.d.cts +3 -0
- package/dist/process/mime.d.ts +3 -0
- package/dist/process/mime.js +24 -0
- package/dist/process/parse.cjs +84 -0
- package/dist/process/parse.d.cts +28 -0
- package/dist/process/parse.d.ts +28 -0
- package/dist/process/parse.js +73 -0
- package/dist/process/processAudiobook.cjs +220 -0
- package/dist/process/processAudiobook.d.cts +24 -0
- package/dist/process/processAudiobook.d.ts +24 -0
- package/dist/process/processAudiobook.js +166 -0
- package/dist/process/ranges.cjs +203 -0
- package/dist/process/ranges.d.cts +15 -0
- package/dist/process/ranges.d.ts +15 -0
- package/dist/process/ranges.js +137 -0
- package/dist/transcribe/parse.cjs +149 -0
- package/dist/transcribe/parse.d.cts +114 -0
- package/dist/transcribe/parse.d.ts +114 -0
- package/dist/transcribe/parse.js +143 -0
- package/dist/transcribe/transcribe.cjs +400 -0
- package/dist/transcribe/transcribe.d.cts +41 -0
- package/dist/transcribe/transcribe.d.ts +41 -0
- package/dist/transcribe/transcribe.js +330 -0
- package/package.json +96 -0
|
@@ -0,0 +1,232 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
3
|
+
var __defProp = Object.defineProperty;
|
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
var __export = (target, all) => {
|
|
9
|
+
for (var name in all)
|
|
10
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
11
|
+
};
|
|
12
|
+
var __copyProps = (to, from, except, desc) => {
|
|
13
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
14
|
+
for (let key of __getOwnPropNames(from))
|
|
15
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
16
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
17
|
+
}
|
|
18
|
+
return to;
|
|
19
|
+
};
|
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
+
mod
|
|
27
|
+
));
|
|
28
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
var ffmpeg_exports = {};
|
|
30
|
+
__export(ffmpeg_exports, {
|
|
31
|
+
getTrackDuration: () => getTrackDuration,
|
|
32
|
+
getTrackInfo: () => getTrackInfo,
|
|
33
|
+
splitFile: () => splitFile,
|
|
34
|
+
transcodeFile: () => transcodeFile
|
|
35
|
+
});
|
|
36
|
+
module.exports = __toCommonJS(ffmpeg_exports);
|
|
37
|
+
var import_node_child_process = require("node:child_process");
|
|
38
|
+
var import_promises = require("node:fs/promises");
|
|
39
|
+
var import_node_path = require("node:path");
|
|
40
|
+
var import_node_util = require("node:util");
|
|
41
|
+
var import_memoize = __toESM(require("memoize"), 1);
|
|
42
|
+
var import_mime = require("../process/mime.cjs");
|
|
43
|
+
var import_shell = require("./shell.cjs");
|
|
44
|
+
// Promisified child_process.exec so shell commands can be awaited.
const execPromise = (0, import_node_util.promisify)(import_node_child_process.exec);
/**
 * Run a shell command and return its stdout.
 * Throws an Error carrying stderr when the command fails, or a descriptive
 * message when the 50 MiB stdout buffer is exceeded.
 * (CJS build: keeps the down-leveled `logger == null ? ...` guards this
 * bundle uses throughout.)
 */
async function execCmd(command, logger, signal) {
  let stdout = "";
  let stderr = "";
  try {
    ({ stdout, stderr } = await execPromise(command, {
      // ffprobe/ffmpeg can emit large JSON or binary (cover art) output.
      maxBuffer: 50 * 1024 * 1024,
      signal: signal ?? void 0
    }));
    return stdout;
  } catch (error) {
    if (error instanceof RangeError && error.message.includes("stdout maxBuffer length exceeded")) {
      // Fixed typo in the user-facing message: "youre" -> "you're".
      throw new Error(
        "stdout maxBuffer length exceeded. This likely means that you're trying to process a very large file, and the ffmpeg process is running out of memory. Maybe check the image size of your cover art."
      );
    }
    logger == null ? void 0 : logger.error(error);
    logger == null ? void 0 : logger.info(stdout);
    throw new Error(stderr);
  }
}
|
|
66
|
+
const getTrackInfo = (0, import_memoize.default)(async function getTrackInfo2(path, logger) {
|
|
67
|
+
const stdout = await execCmd(
|
|
68
|
+
`ffprobe -i ${(0, import_shell.quotePath)(path)} -show_format -of json`,
|
|
69
|
+
logger
|
|
70
|
+
);
|
|
71
|
+
const info = JSON.parse(stdout);
|
|
72
|
+
return parseTrackInfo(info.format);
|
|
73
|
+
});
|
|
74
|
+
async function getTrackDuration(path, logger) {
|
|
75
|
+
const info = await getTrackInfo(path, logger);
|
|
76
|
+
return info["duration"];
|
|
77
|
+
}
|
|
78
|
+
function parseTrackInfo(format) {
|
|
79
|
+
return {
|
|
80
|
+
filename: format.filename,
|
|
81
|
+
nbStreams: format.nb_streams,
|
|
82
|
+
nbPrograms: format.nb_programs,
|
|
83
|
+
formatName: format.format_name,
|
|
84
|
+
formatLongName: format.format_long_name,
|
|
85
|
+
startTime: parseFloat(format.start_time),
|
|
86
|
+
duration: parseFloat(format.duration),
|
|
87
|
+
size: parseInt(format.size, 10),
|
|
88
|
+
bitRate: parseInt(format.bit_rate, 10),
|
|
89
|
+
probeScore: format.probe_score,
|
|
90
|
+
...format.tags && {
|
|
91
|
+
tags: {
|
|
92
|
+
majorBrand: format.tags.major_brand,
|
|
93
|
+
minorVersion: format.tags.minor_version,
|
|
94
|
+
compatibleBrands: format.tags.compatible_brands,
|
|
95
|
+
title: format.tags.title,
|
|
96
|
+
track: format.tags.track,
|
|
97
|
+
album: format.tags.album,
|
|
98
|
+
genre: format.tags.genre,
|
|
99
|
+
artist: format.tags.artist,
|
|
100
|
+
encoder: format.tags.encoder,
|
|
101
|
+
mediaType: format.tags.media_type
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
};
|
|
105
|
+
}
|
|
106
|
+
const hasCoverArt = (0, import_memoize.default)(async function hasCoverArt2(path) {
|
|
107
|
+
try {
|
|
108
|
+
const { stdout } = await execPromise(
|
|
109
|
+
`ffprobe -v quiet -show_streams -of json ${(0, import_shell.quotePath)(path)}`
|
|
110
|
+
);
|
|
111
|
+
const { streams } = JSON.parse(stdout);
|
|
112
|
+
return streams.some((stream) => stream.disposition.attached_pic === 1);
|
|
113
|
+
} catch {
|
|
114
|
+
return null;
|
|
115
|
+
}
|
|
116
|
+
});
|
|
117
|
+
async function constructExtractCoverArtCommand(source, destExtension) {
|
|
118
|
+
if (destExtension === ".wav" || !await hasCoverArt(source)) {
|
|
119
|
+
return "";
|
|
120
|
+
}
|
|
121
|
+
const command = "ffmpeg";
|
|
122
|
+
const args = [
|
|
123
|
+
"-nostdin",
|
|
124
|
+
"-i",
|
|
125
|
+
(0, import_shell.quotePath)(source),
|
|
126
|
+
"-map",
|
|
127
|
+
"0:v",
|
|
128
|
+
"-c:v",
|
|
129
|
+
"copy",
|
|
130
|
+
"-vframes",
|
|
131
|
+
"1",
|
|
132
|
+
"-f",
|
|
133
|
+
"image2",
|
|
134
|
+
"-update",
|
|
135
|
+
"1",
|
|
136
|
+
"pipe:1"
|
|
137
|
+
];
|
|
138
|
+
return `${command} ${args.join(" ")} | `;
|
|
139
|
+
}
|
|
140
|
+
function commonFfmpegArguments(sourceExtension, destExtension, codec, bitrate) {
|
|
141
|
+
const args = ["-vn"];
|
|
142
|
+
if (codec) {
|
|
143
|
+
args.push(
|
|
144
|
+
"-c:a",
|
|
145
|
+
codec,
|
|
146
|
+
...codec === "libopus" ? ["-b:a", bitrate && /^\d+[kK]$/i.test(bitrate) ? bitrate : "32K"] : [],
|
|
147
|
+
...codec === "libmp3lame" && bitrate ? ["-q:a", bitrate] : []
|
|
148
|
+
);
|
|
149
|
+
} else if ((0, import_mime.areSameType)(sourceExtension, destExtension) || destExtension == ".mp4") {
|
|
150
|
+
args.push("-c:a", "copy");
|
|
151
|
+
}
|
|
152
|
+
args.push("-map", "0:a");
|
|
153
|
+
if (destExtension === ".mp4") {
|
|
154
|
+
args.push("-map_chapters", "-1");
|
|
155
|
+
}
|
|
156
|
+
return args;
|
|
157
|
+
}
|
|
158
|
+
async function splitFile(input, output, start, end, encoding, signal, logger) {
|
|
159
|
+
if (start === end) return false;
|
|
160
|
+
logger == null ? void 0 : logger.info(
|
|
161
|
+
`Splitting ${input} start: ${start} end: ${end}${(encoding == null ? void 0 : encoding.codec) ? ` codec: ${encoding.codec}` : ""}`
|
|
162
|
+
);
|
|
163
|
+
const command = "ffmpeg";
|
|
164
|
+
const args = [
|
|
165
|
+
"-nostdin",
|
|
166
|
+
"-ss",
|
|
167
|
+
start,
|
|
168
|
+
"-to",
|
|
169
|
+
end,
|
|
170
|
+
"-i",
|
|
171
|
+
(0, import_shell.quotePath)(input),
|
|
172
|
+
...commonFfmpegArguments(
|
|
173
|
+
(0, import_node_path.extname)(input),
|
|
174
|
+
(0, import_node_path.extname)(output),
|
|
175
|
+
(encoding == null ? void 0 : encoding.codec) ?? null,
|
|
176
|
+
(encoding == null ? void 0 : encoding.bitrate) ?? null
|
|
177
|
+
),
|
|
178
|
+
(0, import_shell.quotePath)(output)
|
|
179
|
+
];
|
|
180
|
+
const coverArtCommand = await constructExtractCoverArtCommand(
|
|
181
|
+
input,
|
|
182
|
+
(0, import_node_path.extname)(output)
|
|
183
|
+
);
|
|
184
|
+
await execCmd(
|
|
185
|
+
`${coverArtCommand}${command} ${args.join(" ")}`,
|
|
186
|
+
logger,
|
|
187
|
+
signal
|
|
188
|
+
);
|
|
189
|
+
return true;
|
|
190
|
+
}
|
|
191
|
+
async function transcodeFile(input, output, encoding, signal, logger) {
|
|
192
|
+
if (!(encoding == null ? void 0 : encoding.codec) && (0, import_mime.areSameType)(input, output)) {
|
|
193
|
+
logger == null ? void 0 : logger.info(
|
|
194
|
+
`Input and output container and codec are the same, copying ${input} to output directory`
|
|
195
|
+
);
|
|
196
|
+
await (0, import_promises.copyFile)(input, output);
|
|
197
|
+
return;
|
|
198
|
+
}
|
|
199
|
+
logger == null ? void 0 : logger.info(
|
|
200
|
+
`Transcoding ${input}${(encoding == null ? void 0 : encoding.codec) ? ` codec: ${encoding.codec}` : ""}`
|
|
201
|
+
);
|
|
202
|
+
const command = "ffmpeg";
|
|
203
|
+
const args = [
|
|
204
|
+
"-nostdin",
|
|
205
|
+
"-i",
|
|
206
|
+
(0, import_shell.quotePath)(input),
|
|
207
|
+
...commonFfmpegArguments(
|
|
208
|
+
(0, import_node_path.extname)(input),
|
|
209
|
+
(0, import_node_path.extname)(output),
|
|
210
|
+
(encoding == null ? void 0 : encoding.codec) ?? null,
|
|
211
|
+
(encoding == null ? void 0 : encoding.bitrate) ?? null
|
|
212
|
+
),
|
|
213
|
+
(0, import_shell.quotePath)(output)
|
|
214
|
+
];
|
|
215
|
+
const coverArtCommand = await constructExtractCoverArtCommand(
|
|
216
|
+
input,
|
|
217
|
+
(0, import_node_path.extname)(output)
|
|
218
|
+
);
|
|
219
|
+
await execCmd(
|
|
220
|
+
`${coverArtCommand}${command} ${args.join(" ")}`,
|
|
221
|
+
logger,
|
|
222
|
+
signal
|
|
223
|
+
);
|
|
224
|
+
return true;
|
|
225
|
+
}
|
|
226
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
227
|
+
0 && (module.exports = {
|
|
228
|
+
getTrackDuration,
|
|
229
|
+
getTrackInfo,
|
|
230
|
+
splitFile,
|
|
231
|
+
transcodeFile
|
|
232
|
+
});
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import { Logger } from 'pino';
|
|
2
|
+
import { AudioEncoding } from '../process/AudioEncoding.cjs';
|
|
3
|
+
|
|
4
|
+
declare const getTrackInfo: (path: string, logger?: Logger) => Promise<TrackInfo>;
|
|
5
|
+
declare function getTrackDuration(path: string, logger?: Logger): Promise<number>;
|
|
6
|
+
type TrackInfo = {
|
|
7
|
+
filename: string;
|
|
8
|
+
nbStreams: number;
|
|
9
|
+
nbPrograms: number;
|
|
10
|
+
formatName: string;
|
|
11
|
+
formatLongName: string;
|
|
12
|
+
startTime: number;
|
|
13
|
+
duration: number;
|
|
14
|
+
size: number;
|
|
15
|
+
bitRate: number;
|
|
16
|
+
probeScore: number;
|
|
17
|
+
tags?: {
|
|
18
|
+
majorBrand: string;
|
|
19
|
+
minorVersion: string;
|
|
20
|
+
compatibleBrands: string;
|
|
21
|
+
title: string;
|
|
22
|
+
track: string;
|
|
23
|
+
album: string;
|
|
24
|
+
genre: string;
|
|
25
|
+
artist: string;
|
|
26
|
+
encoder: string;
|
|
27
|
+
mediaType: string;
|
|
28
|
+
};
|
|
29
|
+
};
|
|
30
|
+
declare function splitFile(input: string, output: string, start: number, end: number, encoding?: AudioEncoding | null, signal?: AbortSignal | null, logger?: Logger | null): Promise<boolean>;
|
|
31
|
+
declare function transcodeFile(input: string, output: string, encoding?: AudioEncoding | null, signal?: AbortSignal | null, logger?: Logger | null): Promise<true | undefined>;
|
|
32
|
+
|
|
33
|
+
export { getTrackDuration, getTrackInfo, splitFile, transcodeFile };
|
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
import { Logger } from 'pino';
|
|
2
|
+
import { AudioEncoding } from '../process/AudioEncoding.js';
|
|
3
|
+
|
|
4
|
+
declare const getTrackInfo: (path: string, logger?: Logger) => Promise<TrackInfo>;
|
|
5
|
+
declare function getTrackDuration(path: string, logger?: Logger): Promise<number>;
|
|
6
|
+
type TrackInfo = {
|
|
7
|
+
filename: string;
|
|
8
|
+
nbStreams: number;
|
|
9
|
+
nbPrograms: number;
|
|
10
|
+
formatName: string;
|
|
11
|
+
formatLongName: string;
|
|
12
|
+
startTime: number;
|
|
13
|
+
duration: number;
|
|
14
|
+
size: number;
|
|
15
|
+
bitRate: number;
|
|
16
|
+
probeScore: number;
|
|
17
|
+
tags?: {
|
|
18
|
+
majorBrand: string;
|
|
19
|
+
minorVersion: string;
|
|
20
|
+
compatibleBrands: string;
|
|
21
|
+
title: string;
|
|
22
|
+
track: string;
|
|
23
|
+
album: string;
|
|
24
|
+
genre: string;
|
|
25
|
+
artist: string;
|
|
26
|
+
encoder: string;
|
|
27
|
+
mediaType: string;
|
|
28
|
+
};
|
|
29
|
+
};
|
|
30
|
+
declare function splitFile(input: string, output: string, start: number, end: number, encoding?: AudioEncoding | null, signal?: AbortSignal | null, logger?: Logger | null): Promise<boolean>;
|
|
31
|
+
declare function transcodeFile(input: string, output: string, encoding?: AudioEncoding | null, signal?: AbortSignal | null, logger?: Logger | null): Promise<true | undefined>;
|
|
32
|
+
|
|
33
|
+
export { getTrackDuration, getTrackInfo, splitFile, transcodeFile };
|
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
import "../chunk-BIEQXUOY.js";
|
|
2
|
+
import { exec } from "node:child_process";
|
|
3
|
+
import { copyFile } from "node:fs/promises";
|
|
4
|
+
import { extname } from "node:path";
|
|
5
|
+
import { promisify } from "node:util";
|
|
6
|
+
import memoize from "memoize";
|
|
7
|
+
import { areSameType } from "../process/mime.js";
|
|
8
|
+
import { quotePath } from "./shell.js";
|
|
9
|
+
// Promisified child_process.exec so shell commands can be awaited.
const execPromise = promisify(exec);

/**
 * Run a shell command and return its stdout.
 *
 * @param command Full shell command line to execute.
 * @param logger  Optional pino-style logger used for failure diagnostics.
 * @param signal  Optional AbortSignal that cancels the child process.
 * @returns The command's stdout on success.
 * @throws Error carrying stderr when the command fails, or a descriptive
 *   message when the 50 MiB stdout buffer is exceeded (typically huge
 *   cover-art output from ffmpeg/ffprobe).
 */
async function execCmd(command, logger, signal) {
  let stdout = "";
  let stderr = "";
  try {
    ({ stdout, stderr } = await execPromise(command, {
      // ffprobe/ffmpeg can emit large JSON or binary (cover art) output.
      maxBuffer: 50 * 1024 * 1024,
      signal: signal ?? void 0
    }));
    return stdout;
  } catch (error) {
    if (error instanceof RangeError && error.message.includes("stdout maxBuffer length exceeded")) {
      // Fixed typo in the user-facing message: "youre" -> "you're".
      throw new Error(
        "stdout maxBuffer length exceeded. This likely means that you're trying to process a very large file, and the ffmpeg process is running out of memory. Maybe check the image size of your cover art."
      );
    }
    logger?.error(error);
    logger?.info(stdout);
    // Surface the command's stderr as the error message.
    throw new Error(stderr);
  }
}
|
|
31
|
+
/**
 * Probe a media file with ffprobe and return its parsed container info.
 * Memoized, so repeated probes of the same arguments reuse the first result.
 */
const getTrackInfo = memoize(async function probeTrack(path, logger) {
  const probeCommand = `ffprobe -i ${quotePath(path)} -show_format -of json`;
  const rawJson = await execCmd(probeCommand, logger);
  const parsed = JSON.parse(rawJson);
  return parseTrackInfo(parsed.format);
});
|
|
39
|
+
/** Return the duration (seconds, as reported by ffprobe) of a media file. */
async function getTrackDuration(path, logger) {
  const { duration } = await getTrackInfo(path, logger);
  return duration;
}
|
|
43
|
+
function parseTrackInfo(format) {
|
|
44
|
+
return {
|
|
45
|
+
filename: format.filename,
|
|
46
|
+
nbStreams: format.nb_streams,
|
|
47
|
+
nbPrograms: format.nb_programs,
|
|
48
|
+
formatName: format.format_name,
|
|
49
|
+
formatLongName: format.format_long_name,
|
|
50
|
+
startTime: parseFloat(format.start_time),
|
|
51
|
+
duration: parseFloat(format.duration),
|
|
52
|
+
size: parseInt(format.size, 10),
|
|
53
|
+
bitRate: parseInt(format.bit_rate, 10),
|
|
54
|
+
probeScore: format.probe_score,
|
|
55
|
+
...format.tags && {
|
|
56
|
+
tags: {
|
|
57
|
+
majorBrand: format.tags.major_brand,
|
|
58
|
+
minorVersion: format.tags.minor_version,
|
|
59
|
+
compatibleBrands: format.tags.compatible_brands,
|
|
60
|
+
title: format.tags.title,
|
|
61
|
+
track: format.tags.track,
|
|
62
|
+
album: format.tags.album,
|
|
63
|
+
genre: format.tags.genre,
|
|
64
|
+
artist: format.tags.artist,
|
|
65
|
+
encoder: format.tags.encoder,
|
|
66
|
+
mediaType: format.tags.media_type
|
|
67
|
+
}
|
|
68
|
+
}
|
|
69
|
+
};
|
|
70
|
+
}
|
|
71
|
+
/**
 * Determine whether a media file contains an attached-picture (cover art)
 * stream. Memoized per argument. Returns null when ffprobe fails or its
 * output cannot be parsed.
 * NOTE(review): this probe uses execPromise directly, without the 50 MiB
 * maxBuffer execCmd applies — presumably -show_streams output is small;
 * confirm if very large files misbehave.
 */
const hasCoverArt = memoize(async function detectCoverArt(path) {
  try {
    const result = await execPromise(
      `ffprobe -v quiet -show_streams -of json ${quotePath(path)}`
    );
    const parsed = JSON.parse(result.stdout);
    for (const stream of parsed.streams) {
      if (stream.disposition.attached_pic === 1) {
        return true;
      }
    }
    return false;
  } catch {
    return null;
  }
});
|
|
82
|
+
/**
 * Build a shell-pipeline prefix ("ffmpeg ... | ") that writes the first
 * cover-art frame of `source` to stdout for a downstream ffmpeg invocation
 * to consume. Returns "" when the destination is .wav or the source has no
 * attached picture (the cover-art probe is skipped entirely for .wav).
 */
async function constructExtractCoverArtCommand(source, destExtension) {
  if (destExtension === ".wav" || !(await hasCoverArt(source))) {
    return "";
  }
  const extractArgs = [
    "-nostdin",
    "-i", quotePath(source),
    "-map", "0:v",        // take the (attached-picture) video stream
    "-c:v", "copy",       // no re-encode of the image
    "-vframes", "1",      // a single frame only
    "-f", "image2",
    "-update", "1",
    "pipe:1"              // emit to stdout for the pipeline
  ].join(" ");
  return `ffmpeg ${extractArgs} | `;
}
|
|
105
|
+
/**
 * Build the ffmpeg argument list shared by splitFile and transcodeFile.
 *
 * @param sourceExtension Extension of the input file (e.g. ".mp3").
 * @param destExtension   Extension of the output file.
 * @param codec   Explicit audio codec to encode with, or null to decide
 *   automatically (stream-copy when source/dest are the same media type,
 *   or when the destination is .mp4).
 * @param bitrate Optional rate value: used as "-b:a" for libopus
 *   (validated against /^\d+[kK]$/, falling back to "32K") and as "-q:a"
 *   for libmp3lame.
 * @returns Array of ffmpeg CLI arguments; video is always disabled (-vn).
 */
function commonFfmpegArguments(sourceExtension, destExtension, codec, bitrate) {
  const args = ["-vn"];
  if (codec) {
    args.push("-c:a", codec);
    if (codec === "libopus") {
      // Opus needs a bitrate; guard against malformed values.
      args.push("-b:a", bitrate && /^\d+[kK]$/i.test(bitrate) ? bitrate : "32K");
    } else if (codec === "libmp3lame" && bitrate) {
      args.push("-q:a", bitrate);
    }
  } else if (areSameType(sourceExtension, destExtension) || destExtension === ".mp4") {
    // Fixed inconsistency: was loose `==` here while `===` is used elsewhere.
    args.push("-c:a", "copy");
  }
  args.push("-map", "0:a");
  if (destExtension === ".mp4") {
    // "-map_chapters -1" strips chapter metadata from .mp4 outputs.
    args.push("-map_chapters", "-1");
  }
  return args;
}
|
|
123
|
+
/**
 * Extract the [start, end] segment of `input` into `output` with ffmpeg,
 * piping cover art through when the output container supports it.
 * Returns false (doing nothing) for a zero-length segment, true once the
 * ffmpeg invocation completes.
 */
async function splitFile(input, output, start, end, encoding, signal, logger) {
  if (start === end) return false;
  const codec = encoding?.codec ?? null;
  logger?.info(
    `Splitting ${input} start: ${start} end: ${end}${codec ? ` codec: ${codec}` : ""}`
  );
  const splitArgs = [
    "-nostdin",
    "-ss", start,
    "-to", end,
    "-i", quotePath(input),
    ...commonFfmpegArguments(
      extname(input),
      extname(output),
      codec,
      encoding?.bitrate ?? null
    ),
    quotePath(output)
  ].join(" ");
  const coverArtPrefix = await constructExtractCoverArtCommand(input, extname(output));
  await execCmd(`${coverArtPrefix}ffmpeg ${splitArgs}`, logger, signal);
  return true;
}
|
|
156
|
+
/**
 * Transcode `input` into `output` with ffmpeg, carrying cover art across
 * when the output container supports it. When no codec is requested and
 * input/output are the same media type, the file is copied verbatim and
 * undefined is returned; a completed transcode returns true.
 * NOTE(review): the true/undefined asymmetry (vs splitFile's boolean) is
 * preserved for caller compatibility.
 */
async function transcodeFile(input, output, encoding, signal, logger) {
  const codec = encoding?.codec ?? null;
  if (!codec && areSameType(input, output)) {
    logger?.info(
      `Input and output container and codec are the same, copying ${input} to output directory`
    );
    await copyFile(input, output);
    return;
  }
  logger?.info(`Transcoding ${input}${codec ? ` codec: ${codec}` : ""}`);
  const transcodeArgs = [
    "-nostdin",
    "-i", quotePath(input),
    ...commonFfmpegArguments(
      extname(input),
      extname(output),
      codec,
      encoding?.bitrate ?? null
    ),
    quotePath(output)
  ].join(" ");
  const coverArtPrefix = await constructExtractCoverArtCommand(input, extname(output));
  await execCmd(`${coverArtPrefix}ffmpeg ${transcodeArgs}`, logger, signal);
  return true;
}
|
|
191
|
+
export {
|
|
192
|
+
getTrackDuration,
|
|
193
|
+
getTrackInfo,
|
|
194
|
+
splitFile,
|
|
195
|
+
transcodeFile
|
|
196
|
+
};
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __create = Object.create;
|
|
3
|
+
var __defProp = Object.defineProperty;
|
|
4
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
5
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
6
|
+
var __getProtoOf = Object.getPrototypeOf;
|
|
7
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
8
|
+
var __export = (target, all) => {
|
|
9
|
+
for (var name in all)
|
|
10
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
11
|
+
};
|
|
12
|
+
var __copyProps = (to, from, except, desc) => {
|
|
13
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
14
|
+
for (let key of __getOwnPropNames(from))
|
|
15
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
16
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
17
|
+
}
|
|
18
|
+
return to;
|
|
19
|
+
};
|
|
20
|
+
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
|
|
21
|
+
// If the importer is in node compatibility mode or this is not an ESM
|
|
22
|
+
// file that has been converted to a CommonJS file using a Babel-
|
|
23
|
+
// compatible transform (i.e. "__esModule" has not been set), then set
|
|
24
|
+
// "default" to the CommonJS "module.exports" for node compatibility.
|
|
25
|
+
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
|
|
26
|
+
mod
|
|
27
|
+
));
|
|
28
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
var logging_exports = {};
|
|
30
|
+
__export(logging_exports, {
|
|
31
|
+
createLogger: () => createLogger
|
|
32
|
+
});
|
|
33
|
+
module.exports = __toCommonJS(logging_exports);
|
|
34
|
+
var import_pino = __toESM(require("pino"), 1);
|
|
35
|
+
var import_pino_pretty = __toESM(require("pino-pretty"), 1);
|
|
36
|
+
/**
 * Create a pino logger that pretty-prints, hiding pid/hostname and
 * rendering timestamps in the "SYS:standard" format.
 * (CJS build: pino/pino-pretty are reached through the interop wrappers.)
 */
function createLogger(level = "info") {
  const prettyStream = (0, import_pino_pretty.default)({
    ignore: "pid,hostname",
    translateTime: "SYS:standard"
  });
  return (0, import_pino.default)({ level }, prettyStream);
}
|
|
42
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
43
|
+
0 && (module.exports = {
|
|
44
|
+
createLogger
|
|
45
|
+
});
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import "../chunk-BIEQXUOY.js";
|
|
2
|
+
import pino from "pino";
|
|
3
|
+
import PinoPretty from "pino-pretty";
|
|
4
|
+
/**
 * Create a pino logger that pretty-prints, hiding pid/hostname and
 * rendering timestamps in the "SYS:standard" format.
 */
function createLogger(level = "info") {
  const prettyStream = PinoPretty({
    ignore: "pid,hostname",
    translateTime: "SYS:standard"
  });
  return pino({ level }, prettyStream);
}
|
|
10
|
+
export {
|
|
11
|
+
createLogger
|
|
12
|
+
};
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __defProp = Object.defineProperty;
|
|
3
|
+
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
|
|
4
|
+
var __getOwnPropNames = Object.getOwnPropertyNames;
|
|
5
|
+
var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
6
|
+
var __export = (target, all) => {
|
|
7
|
+
for (var name in all)
|
|
8
|
+
__defProp(target, name, { get: all[name], enumerable: true });
|
|
9
|
+
};
|
|
10
|
+
var __copyProps = (to, from, except, desc) => {
|
|
11
|
+
if (from && typeof from === "object" || typeof from === "function") {
|
|
12
|
+
for (let key of __getOwnPropNames(from))
|
|
13
|
+
if (!__hasOwnProp.call(to, key) && key !== except)
|
|
14
|
+
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
|
|
15
|
+
}
|
|
16
|
+
return to;
|
|
17
|
+
};
|
|
18
|
+
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
var parse_exports = {};
|
|
20
|
+
__export(parse_exports, {
|
|
21
|
+
granularityParser: () => granularityParser,
|
|
22
|
+
languageParser: () => languageParser,
|
|
23
|
+
loggingParser: () => loggingParser,
|
|
24
|
+
parallelismParser: () => parallelismParser
|
|
25
|
+
});
|
|
26
|
+
module.exports = __toCommonJS(parse_exports);
|
|
27
|
+
var import_core = require("@optique/core");
|
|
28
|
+
const loggingParser = (0, import_core.object)({
|
|
29
|
+
noProgress: (0, import_core.option)("--no-progress", {
|
|
30
|
+
description: import_core.message`Disable the progress bar`
|
|
31
|
+
}),
|
|
32
|
+
logLevel: (0, import_core.withDefault)(
|
|
33
|
+
(0, import_core.option)(
|
|
34
|
+
"--log-level",
|
|
35
|
+
(0, import_core.choice)(["silent", "debug", "info", "warn", "error"]),
|
|
36
|
+
{
|
|
37
|
+
description: import_core.message`Log level. If enabled, will disable progress bar.`
|
|
38
|
+
}
|
|
39
|
+
),
|
|
40
|
+
"silent"
|
|
41
|
+
),
|
|
42
|
+
time: (0, import_core.option)("--time", {
|
|
43
|
+
description: import_core.message`Whether to print timing statistics`
|
|
44
|
+
})
|
|
45
|
+
});
|
|
46
|
+
const granularityParser = (0, import_core.object)({
|
|
47
|
+
granularity: (0, import_core.withDefault)(
|
|
48
|
+
(0, import_core.option)("--granularity", "-g", (0, import_core.choice)(["word", "sentence"])),
|
|
49
|
+
"sentence"
|
|
50
|
+
)
|
|
51
|
+
});
|
|
52
|
+
const languageParser = (0, import_core.object)({
|
|
53
|
+
language: (0, import_core.optional)(
|
|
54
|
+
(0, import_core.option)("--language", (0, import_core.locale)(), {
|
|
55
|
+
description: import_core.message`BCP 47 language tag representing the primary language of the audio (e.g. en-US)`
|
|
56
|
+
})
|
|
57
|
+
)
|
|
58
|
+
});
|
|
59
|
+
const parallelismParser = (0, import_core.object)({
|
|
60
|
+
parallelism: (0, import_core.withDefault)(
|
|
61
|
+
(0, import_core.option)("--parallel", (0, import_core.integer)(), {
|
|
62
|
+
description: import_core.message`How many files to attempt to transcode in parallel.`
|
|
63
|
+
}),
|
|
64
|
+
1
|
|
65
|
+
)
|
|
66
|
+
});
|
|
67
|
+
// Annotate the CommonJS export names for ESM import in node:
|
|
68
|
+
0 && (module.exports = {
|
|
69
|
+
granularityParser,
|
|
70
|
+
languageParser,
|
|
71
|
+
loggingParser,
|
|
72
|
+
parallelismParser
|
|
73
|
+
});
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import * as _optique_core from '@optique/core';
|
|
2
|
+
|
|
3
|
+
declare const loggingParser: _optique_core.Parser<"sync", {
|
|
4
|
+
readonly noProgress: boolean;
|
|
5
|
+
readonly logLevel: "silent" | "debug" | "info" | "warn" | "error";
|
|
6
|
+
readonly time: boolean;
|
|
7
|
+
}, {
|
|
8
|
+
readonly noProgress: _optique_core.ValueParserResult<boolean> | undefined;
|
|
9
|
+
readonly logLevel: [_optique_core.ValueParserResult<"silent" | "debug" | "info" | "warn" | "error"> | undefined] | undefined;
|
|
10
|
+
readonly time: _optique_core.ValueParserResult<boolean> | undefined;
|
|
11
|
+
}>;
|
|
12
|
+
declare const granularityParser: _optique_core.Parser<"sync", {
|
|
13
|
+
readonly granularity: "word" | "sentence";
|
|
14
|
+
}, {
|
|
15
|
+
readonly granularity: [_optique_core.ValueParserResult<"word" | "sentence"> | undefined] | undefined;
|
|
16
|
+
}>;
|
|
17
|
+
declare const languageParser: _optique_core.Parser<"sync", {
|
|
18
|
+
readonly language: Intl.Locale | undefined;
|
|
19
|
+
}, {
|
|
20
|
+
readonly language: [_optique_core.ValueParserResult<Intl.Locale> | undefined] | undefined;
|
|
21
|
+
}>;
|
|
22
|
+
declare const parallelismParser: _optique_core.Parser<"sync", {
|
|
23
|
+
readonly parallelism: number;
|
|
24
|
+
}, {
|
|
25
|
+
readonly parallelism: [_optique_core.ValueParserResult<number> | undefined] | undefined;
|
|
26
|
+
}>;
|
|
27
|
+
|
|
28
|
+
export { granularityParser, languageParser, loggingParser, parallelismParser };
|