@redaksjon/protokoll-engine 0.1.1-dev.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +47 -0
- package/dist/agentic/executor.d.ts +21 -0
- package/dist/agentic/executor.d.ts.map +1 -0
- package/dist/agentic/index.d.ts +27 -0
- package/dist/agentic/index.d.ts.map +1 -0
- package/dist/agentic/registry.d.ts +11 -0
- package/dist/agentic/registry.d.ts.map +1 -0
- package/dist/agentic/tools/lookup-person.d.ts +3 -0
- package/dist/agentic/tools/lookup-person.d.ts.map +1 -0
- package/dist/agentic/tools/lookup-project.d.ts +3 -0
- package/dist/agentic/tools/lookup-project.d.ts.map +1 -0
- package/dist/agentic/tools/route-note.d.ts +3 -0
- package/dist/agentic/tools/route-note.d.ts.map +1 -0
- package/dist/agentic/tools/store-context.d.ts +3 -0
- package/dist/agentic/tools/store-context.d.ts.map +1 -0
- package/dist/agentic/tools/verify-spelling.d.ts +3 -0
- package/dist/agentic/tools/verify-spelling.d.ts.map +1 -0
- package/dist/agentic/types.d.ts +110 -0
- package/dist/agentic/types.d.ts.map +1 -0
- package/dist/constants.d.ts +98 -0
- package/dist/constants.d.ts.map +1 -0
- package/dist/feedback/analyzer.d.ts +13 -0
- package/dist/feedback/analyzer.d.ts.map +1 -0
- package/dist/feedback/decision-tracker.d.ts +14 -0
- package/dist/feedback/decision-tracker.d.ts.map +1 -0
- package/dist/feedback/handler.d.ts +14 -0
- package/dist/feedback/handler.d.ts.map +1 -0
- package/dist/feedback/index.d.ts +12 -0
- package/dist/feedback/index.d.ts.map +1 -0
- package/dist/feedback/types.d.ts +72 -0
- package/dist/feedback/types.d.ts.map +1 -0
- package/dist/index.d.ts +24 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +32 -0
- package/dist/index.js.map +1 -0
- package/dist/index10.js +4 -0
- package/dist/index10.js.map +1 -0
- package/dist/index11.js +22 -0
- package/dist/index11.js.map +1 -0
- package/dist/index12.js +125 -0
- package/dist/index12.js.map +1 -0
- package/dist/index13.js +124 -0
- package/dist/index13.js.map +1 -0
- package/dist/index14.js +296 -0
- package/dist/index14.js.map +1 -0
- package/dist/index15.js +100 -0
- package/dist/index15.js.map +1 -0
- package/dist/index16.js +107 -0
- package/dist/index16.js.map +1 -0
- package/dist/index17.js +185 -0
- package/dist/index17.js.map +1 -0
- package/dist/index18.js +53 -0
- package/dist/index18.js.map +1 -0
- package/dist/index19.js +19 -0
- package/dist/index19.js.map +1 -0
- package/dist/index2.js +33 -0
- package/dist/index2.js.map +1 -0
- package/dist/index20.js +105 -0
- package/dist/index20.js.map +1 -0
- package/dist/index21.js +26 -0
- package/dist/index21.js.map +1 -0
- package/dist/index22.js +49 -0
- package/dist/index22.js.map +1 -0
- package/dist/index23.js +119 -0
- package/dist/index23.js.map +1 -0
- package/dist/index24.js +330 -0
- package/dist/index24.js.map +1 -0
- package/dist/index25.js +57 -0
- package/dist/index25.js.map +1 -0
- package/dist/index26.js +38 -0
- package/dist/index26.js.map +1 -0
- package/dist/index27.js +127 -0
- package/dist/index27.js.map +1 -0
- package/dist/index28.js +157 -0
- package/dist/index28.js.map +1 -0
- package/dist/index29.js +163 -0
- package/dist/index29.js.map +1 -0
- package/dist/index3.js +36 -0
- package/dist/index3.js.map +1 -0
- package/dist/index30.js +173 -0
- package/dist/index30.js.map +1 -0
- package/dist/index31.js +423 -0
- package/dist/index31.js.map +1 -0
- package/dist/index32.js +161 -0
- package/dist/index32.js.map +1 -0
- package/dist/index33.js +152 -0
- package/dist/index33.js.map +1 -0
- package/dist/index34.js +56 -0
- package/dist/index34.js.map +1 -0
- package/dist/index35.js +103 -0
- package/dist/index35.js.map +1 -0
- package/dist/index36.js +451 -0
- package/dist/index36.js.map +1 -0
- package/dist/index37.js +431 -0
- package/dist/index37.js.map +1 -0
- package/dist/index38.js +87 -0
- package/dist/index38.js.map +1 -0
- package/dist/index39.js +122 -0
- package/dist/index39.js.map +1 -0
- package/dist/index4.js +3 -0
- package/dist/index4.js.map +1 -0
- package/dist/index40.js +299 -0
- package/dist/index40.js.map +1 -0
- package/dist/index41.js +49 -0
- package/dist/index41.js.map +1 -0
- package/dist/index42.js +151 -0
- package/dist/index42.js.map +1 -0
- package/dist/index43.js +226 -0
- package/dist/index43.js.map +1 -0
- package/dist/index44.js +49 -0
- package/dist/index44.js.map +1 -0
- package/dist/index45.js +45 -0
- package/dist/index45.js.map +1 -0
- package/dist/index46.js +37 -0
- package/dist/index46.js.map +1 -0
- package/dist/index47.js +51 -0
- package/dist/index47.js.map +1 -0
- package/dist/index48.js +39 -0
- package/dist/index48.js.map +1 -0
- package/dist/index49.js +239 -0
- package/dist/index49.js.map +1 -0
- package/dist/index5.js +17 -0
- package/dist/index5.js.map +1 -0
- package/dist/index50.js +163 -0
- package/dist/index50.js.map +1 -0
- package/dist/index51.js +81 -0
- package/dist/index51.js.map +1 -0
- package/dist/index52.js +78 -0
- package/dist/index52.js.map +1 -0
- package/dist/index53.js +22 -0
- package/dist/index53.js.map +1 -0
- package/dist/index54.js +8 -0
- package/dist/index54.js.map +1 -0
- package/dist/index55.js +8 -0
- package/dist/index55.js.map +1 -0
- package/dist/index56.js +17 -0
- package/dist/index56.js.map +1 -0
- package/dist/index57.js +4 -0
- package/dist/index57.js.map +1 -0
- package/dist/index58.js +17 -0
- package/dist/index58.js.map +1 -0
- package/dist/index59.js +4 -0
- package/dist/index59.js.map +1 -0
- package/dist/index6.js +22 -0
- package/dist/index6.js.map +1 -0
- package/dist/index60.js +6 -0
- package/dist/index60.js.map +1 -0
- package/dist/index7.js +27 -0
- package/dist/index7.js.map +1 -0
- package/dist/index8.js +22 -0
- package/dist/index8.js.map +1 -0
- package/dist/index9.js +5 -0
- package/dist/index9.js.map +1 -0
- package/dist/logging.d.ts +7 -0
- package/dist/logging.d.ts.map +1 -0
- package/dist/output/index.d.ts +15 -0
- package/dist/output/index.d.ts.map +1 -0
- package/dist/phases/complete.d.ts +17 -0
- package/dist/phases/complete.d.ts.map +1 -0
- package/dist/phases/index.d.ts +5 -0
- package/dist/phases/index.d.ts.map +1 -0
- package/dist/phases/locate.d.ts +15 -0
- package/dist/phases/locate.d.ts.map +1 -0
- package/dist/phases/simple-replace.d.ts +72 -0
- package/dist/phases/simple-replace.d.ts.map +1 -0
- package/dist/phases/transcribe.d.ts +19 -0
- package/dist/phases/transcribe.d.ts.map +1 -0
- package/dist/pipeline/index.d.ts +10 -0
- package/dist/pipeline/index.d.ts.map +1 -0
- package/dist/pipeline/orchestrator.d.ts +13 -0
- package/dist/pipeline/orchestrator.d.ts.map +1 -0
- package/dist/pipeline/types.d.ts +58 -0
- package/dist/pipeline/types.d.ts.map +1 -0
- package/dist/prompt/index.d.ts +3 -0
- package/dist/prompt/index.d.ts.map +1 -0
- package/dist/prompt/templates.d.ts +40 -0
- package/dist/prompt/templates.d.ts.map +1 -0
- package/dist/prompt/transcribe.d.ts +42 -0
- package/dist/prompt/transcribe.d.ts.map +1 -0
- package/dist/reasoning/client.d.ts +42 -0
- package/dist/reasoning/client.d.ts.map +1 -0
- package/dist/reasoning/index.d.ts +17 -0
- package/dist/reasoning/index.d.ts.map +1 -0
- package/dist/reasoning/strategy.d.ts +12 -0
- package/dist/reasoning/strategy.d.ts.map +1 -0
- package/dist/reasoning/types.d.ts +58 -0
- package/dist/reasoning/types.d.ts.map +1 -0
- package/dist/reflection/collector.d.ts +18 -0
- package/dist/reflection/collector.d.ts.map +1 -0
- package/dist/reflection/index.d.ts +13 -0
- package/dist/reflection/index.d.ts.map +1 -0
- package/dist/reflection/reporter.d.ts +10 -0
- package/dist/reflection/reporter.d.ts.map +1 -0
- package/dist/reflection/types.d.ts +99 -0
- package/dist/reflection/types.d.ts.map +1 -0
- package/dist/routing/classifier.d.ts +8 -0
- package/dist/routing/classifier.d.ts.map +1 -0
- package/dist/routing/index.d.ts +12 -0
- package/dist/routing/index.d.ts.map +1 -0
- package/dist/routing/router.d.ts +8 -0
- package/dist/routing/router.d.ts.map +1 -0
- package/dist/routing/types.d.ts +68 -0
- package/dist/routing/types.d.ts.map +1 -0
- package/dist/transcript/feedback.d.ts +70 -0
- package/dist/transcript/feedback.d.ts.map +1 -0
- package/dist/transcript/index.d.ts +10 -0
- package/dist/transcript/index.d.ts.map +1 -0
- package/dist/transcript/operations.d.ts +152 -0
- package/dist/transcript/operations.d.ts.map +1 -0
- package/dist/transcript/pkl-utils.d.ts +66 -0
- package/dist/transcript/pkl-utils.d.ts.map +1 -0
- package/dist/transcription/index.d.ts +17 -0
- package/dist/transcription/index.d.ts.map +1 -0
- package/dist/transcription/service.d.ts +10 -0
- package/dist/transcription/service.d.ts.map +1 -0
- package/dist/transcription/types.d.ts +41 -0
- package/dist/transcription/types.d.ts.map +1 -0
- package/dist/types.d.ts +28 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/util/collision-detector.d.ts +77 -0
- package/dist/util/collision-detector.d.ts.map +1 -0
- package/dist/util/dates.d.ts +57 -0
- package/dist/util/dates.d.ts.map +1 -0
- package/dist/util/general.d.ts +3 -0
- package/dist/util/general.d.ts.map +1 -0
- package/dist/util/media.d.ts +9 -0
- package/dist/util/media.d.ts.map +1 -0
- package/dist/util/metadata.d.ts +138 -0
- package/dist/util/metadata.d.ts.map +1 -0
- package/dist/util/openai.d.ts +22 -0
- package/dist/util/openai.d.ts.map +1 -0
- package/dist/util/sounds-like-database.d.ts +98 -0
- package/dist/util/sounds-like-database.d.ts.map +1 -0
- package/dist/util/storage.d.ts +35 -0
- package/dist/util/storage.d.ts.map +1 -0
- package/dist/util/text-replacer.d.ts +56 -0
- package/dist/util/text-replacer.d.ts.map +1 -0
- package/dist/utils/entityFinder.d.ts +29 -0
- package/dist/utils/entityFinder.d.ts.map +1 -0
- package/package.json +84 -0
package/dist/index11.js
ADDED
@@ -0,0 +1,22 @@
+import { create as create$2 } from './index39.js';
+import { create as create$1 } from './index40.js';
+
+const create = (config) => {
+  const collector = create$2();
+  const reporter = create$1(config);
+  return {
+    collector,
+    reporter,
+    generate: (audioFile, outputFile, conversationHistory, output) => reporter.generate(collector, audioFile, outputFile, conversationHistory, output),
+    save: (report, path) => reporter.save(report, path)
+  };
+};
+const DEFAULT_REFLECTION_CONFIG = {
+  enabled: false,
+  format: "markdown",
+  includeConversation: false,
+  includeOutput: true
+};
+
+export { DEFAULT_REFLECTION_CONFIG, create };
+//# sourceMappingURL=index11.js.map
@@ -0,0 +1 @@
{"version":3,"file":"index11.js","sources":["../src/reflection/index.ts"],"sourcesContent":["/**\n * Self-Reflection System\n *\n * Main entry point for the self-reflection system. Provides metrics collection\n * and report generation for transcription quality analysis.\n */\n\nimport { ReflectionConfig, ReflectionReport } from './types';\nimport * as Collector from './collector';\nimport * as Reporter from './reporter';\n\nexport interface ReflectionInstance {\n collector: Collector.CollectorInstance;\n reporter: Reporter.ReporterInstance;\n generate(\n audioFile: string,\n outputFile: string,\n conversationHistory?: unknown[],\n output?: string\n ): ReflectionReport;\n save(report: ReflectionReport, path: string): Promise<void>;\n}\n\nexport const create = (config: ReflectionConfig): ReflectionInstance => {\n const collector = Collector.create();\n const reporter = Reporter.create(config);\n \n return {\n collector,\n reporter,\n generate: (audioFile, outputFile, conversationHistory, output) => \n reporter.generate(collector, audioFile, outputFile, conversationHistory, output),\n save: (report, path) => reporter.save(report, path),\n };\n};\n\nexport const DEFAULT_REFLECTION_CONFIG: ReflectionConfig = {\n enabled: false,\n format: 'markdown',\n includeConversation: false,\n includeOutput: true,\n};\n\n// Re-export types\nexport * from './types';\n\n"],"names":["Collector.create","Reporter.create"],"mappings":";;;AAuBO,MAAM,MAAA,GAAS,CAAC,MAAA,KAAiD;AACpE,EAAA,MAAM,SAAA,GAAYA,QAAU,EAAO;AACnC,EAAA,MAAM,QAAA,GAAWC,QAAS,CAAO,MAAM,CAAA;AAEvC,EAAA,OAAO;AAAA,IACH,SAAA;AAAA,IACA,QAAA;AAAA,IACA,QAAA,EAAU,CAAC,SAAA,EAAW,UAAA,EAAY,mBAAA,EAAqB,MAAA,KACnD,QAAA,CAAS,QAAA,CAAS,SAAA,EAAW,SAAA,EAAW,UAAA,EAAY,mBAAA,EAAqB,MAAM,CAAA;AAAA,IACnF,MAAM,CAAC,MAAA,EAAQ,SAAS,QAAA,CAAS,IAAA,CAAK,QAAQ,IAAI;AAAA,GACtD;AACJ;AAEO,MAAM,yBAAA,GAA8C;AAAA,EACvD,OAAA,EAAS,KAAA;AAAA,EACT,MAAA,EAAQ,UAAA;AAAA,EACR,mBAAA,EAAqB,KAAA;AAAA,EACrB,aAAA,EAAe;AACnB;;;;"}
package/dist/index12.js
ADDED
@@ -0,0 +1,125 @@
+import * as fs from 'node:fs';
+import { glob } from 'glob';
+import path__default from 'node:path';
+import crypto from 'node:crypto';
+
+const create = (params) => {
+  const log = params.log || console.log;
+  const exists = async (path2) => {
+    try {
+      await fs.promises.stat(path2);
+      return true;
+    } catch (error) {
+      return false;
+    }
+  };
+  const isDirectory = async (path2) => {
+    const stats = await fs.promises.stat(path2);
+    if (!stats.isDirectory()) {
+      log(`${path2} is not a directory`);
+      return false;
+    }
+    return true;
+  };
+  const isFile = async (path2) => {
+    const stats = await fs.promises.stat(path2);
+    if (!stats.isFile()) {
+      log(`${path2} is not a file`);
+      return false;
+    }
+    return true;
+  };
+  const isReadable = async (path2) => {
+    try {
+      await fs.promises.access(path2, fs.constants.R_OK);
+    } catch (error) {
+      log(`${path2} is not readable: %s %s`, error.message, error.stack);
+      return false;
+    }
+    return true;
+  };
+  const isWritable = async (path2) => {
+    try {
+      await fs.promises.access(path2, fs.constants.W_OK);
+    } catch (error) {
+      log(`${path2} is not writable: %s %s`, error.message, error.stack);
+      return false;
+    }
+    return true;
+  };
+  const isFileReadable = async (path2) => {
+    return await exists(path2) && await isFile(path2) && await isReadable(path2);
+  };
+  const isDirectoryWritable = async (path2) => {
+    return await exists(path2) && await isDirectory(path2) && await isWritable(path2);
+  };
+  const isDirectoryReadable = async (path2) => {
+    return await exists(path2) && await isDirectory(path2) && await isReadable(path2);
+  };
+  const createDirectory = async (path2) => {
+    try {
+      await fs.promises.mkdir(path2, { recursive: true });
+    } catch (mkdirError) {
+      throw new Error(`Failed to create output directory ${path2}: ${mkdirError.message} ${mkdirError.stack}`);
+    }
+  };
+  const readFile = async (path2, encoding) => {
+    return await fs.promises.readFile(path2, { encoding });
+  };
+  const writeFile = async (path2, data, encoding) => {
+    await fs.promises.writeFile(path2, data, { encoding });
+  };
+  const forEachFileIn = async (directory, callback, options = { pattern: "*.*" }) => {
+    try {
+      const files = await glob(options.pattern, { cwd: directory, nodir: true });
+      for (const file of files) {
+        await callback(path__default.join(directory, file));
+      }
+    } catch (err) {
+      throw new Error(`Failed to glob pattern ${options.pattern} in ${directory}: ${err.message}`);
+    }
+  };
+  const readStream = async (path2) => {
+    return fs.createReadStream(path2);
+  };
+  const hashFile = async (path2, length) => {
+    const file = await readFile(path2, "utf8");
+    return crypto.createHash("sha256").update(file).digest("hex").slice(0, length);
+  };
+  const listFiles = async (directory) => {
+    return await fs.promises.readdir(directory);
+  };
+  const deleteFile = async (path2) => {
+    await fs.promises.unlink(path2);
+  };
+  const deleteDirectory = async (path2) => {
+    await fs.promises.rm(path2, { recursive: true, force: true });
+  };
+  const getFileSize = async (path2) => {
+    const stats = await fs.promises.stat(path2);
+    return stats.size;
+  };
+  return {
+    exists,
+    isDirectory,
+    isFile,
+    isReadable,
+    isWritable,
+    isFileReadable,
+    isDirectoryWritable,
+    isDirectoryReadable,
+    createDirectory,
+    readFile,
+    readStream,
+    writeFile,
+    forEachFileIn,
+    hashFile,
+    listFiles,
+    deleteFile,
+    deleteDirectory,
+    getFileSize
+  };
+};
+
+export { create };
+//# sourceMappingURL=index12.js.map
@@ -0,0 +1 @@
{"version":3,"file":"index12.js","sources":["../src/util/storage.ts"],"sourcesContent":["// eslint-disable-next-line no-restricted-imports\nimport * as fs from 'node:fs';\nimport { glob } from 'glob';\nimport path from 'node:path';\nimport crypto from 'node:crypto';\n/**\n * This module exists to isolate filesystem operations from the rest of the codebase.\n * This makes testing easier by avoiding direct fs mocking in jest configuration.\n * \n * Additionally, abstracting storage operations allows for future flexibility - \n * this export utility may need to work with storage systems other than the local filesystem\n * (e.g. S3, Google Cloud Storage, etc).\n */\n\nexport interface Utility {\n exists: (path: string) => Promise<boolean>;\n isDirectory: (path: string) => Promise<boolean>;\n isFile: (path: string) => Promise<boolean>;\n isReadable: (path: string) => Promise<boolean>;\n isWritable: (path: string) => Promise<boolean>;\n isFileReadable: (path: string) => Promise<boolean>;\n isDirectoryWritable: (path: string) => Promise<boolean>;\n isDirectoryReadable: (path: string) => Promise<boolean>;\n createDirectory: (path: string) => Promise<void>;\n readFile: (path: string, encoding: string) => Promise<string>;\n readStream: (path: string) => Promise<fs.ReadStream>;\n writeFile: (path: string, data: string | Buffer, encoding: string) => Promise<void>;\n forEachFileIn: (directory: string, callback: (path: string) => Promise<void>, options?: { pattern: string }) => Promise<void>;\n hashFile: (path: string, length: number) => Promise<string>;\n listFiles: (directory: string) => Promise<string[]>;\n deleteFile: (path: string) => Promise<void>;\n deleteDirectory: (path: string) => Promise<void>;\n getFileSize: (path: string) => Promise<number>;\n}\n\nexport const create = (params: { log?: (message: string, ...args: any[]) => void }): Utility => {\n\n // eslint-disable-next-line no-console\n const log = params.log || console.log;\n\n const exists = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.stat(path);\n return true;\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n } catch (error: any) {\n return false;\n }\n }\n\n const isDirectory = async (path: string): Promise<boolean> => {\n const stats = await fs.promises.stat(path);\n if (!stats.isDirectory()) {\n log(`${path} is not a directory`);\n return false;\n }\n return true;\n }\n\n const isFile = async (path: string): Promise<boolean> => {\n const stats = await fs.promises.stat(path);\n if (!stats.isFile()) {\n log(`${path} is not a file`);\n return false;\n }\n return true;\n }\n\n const isReadable = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.access(path, fs.constants.R_OK);\n } catch (error: any) {\n log(`${path} is not readable: %s %s`, error.message, error.stack);\n return false;\n }\n return true;\n }\n\n const isWritable = async (path: string): Promise<boolean> => {\n try {\n await fs.promises.access(path, fs.constants.W_OK);\n } catch (error: any) {\n log(`${path} is not writable: %s %s`, error.message, error.stack);\n return false;\n }\n return true;\n }\n\n const isFileReadable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isFile(path) && await isReadable(path);\n }\n\n const isDirectoryWritable = async (path: string): Promise<boolean> => {\n return await exists(path) && await isDirectory(path) && await isWritable(path);\n }\n\n const isDirectoryReadable = async (path: string): Promise<boolean> => {\n return await exists(path) 
&& await isDirectory(path) && await isReadable(path);\n }\n\n const createDirectory = async (path: string): Promise<void> => {\n try {\n await fs.promises.mkdir(path, { recursive: true });\n } catch (mkdirError: any) {\n throw new Error(`Failed to create output directory ${path}: ${mkdirError.message} ${mkdirError.stack}`);\n }\n }\n\n const readFile = async (path: string, encoding: string): Promise<string> => {\n return await fs.promises.readFile(path, { encoding: encoding as BufferEncoding });\n }\n\n const writeFile = async (path: string, data: string | Buffer, encoding: string): Promise<void> => {\n await fs.promises.writeFile(path, data, { encoding: encoding as BufferEncoding });\n }\n\n const forEachFileIn = async (directory: string, callback: (file: string) => Promise<void>, options: { pattern: string | string[] } = { pattern: '*.*' }): Promise<void> => {\n try {\n const files = await glob(options.pattern, { cwd: directory, nodir: true });\n for (const file of files) {\n await callback(path.join(directory, file));\n }\n } catch (err: any) {\n throw new Error(`Failed to glob pattern ${options.pattern} in ${directory}: ${err.message}`);\n }\n }\n\n const readStream = async (path: string): Promise<fs.ReadStream> => {\n return fs.createReadStream(path);\n }\n\n const hashFile = async (path: string, length: number): Promise<string> => {\n const file = await readFile(path, 'utf8');\n return crypto.createHash('sha256').update(file).digest('hex').slice(0, length);\n }\n\n const listFiles = async (directory: string): Promise<string[]> => {\n return await fs.promises.readdir(directory);\n }\n\n const deleteFile = async (path: string): Promise<void> => {\n await fs.promises.unlink(path);\n }\n\n const deleteDirectory = async (path: string): Promise<void> => {\n await fs.promises.rm(path, { recursive: true, force: true });\n }\n\n const getFileSize = async (path: string): Promise<number> => {\n const stats = await fs.promises.stat(path);\n return stats.size;\n }\n\n return {\n exists,\n isDirectory,\n isFile,\n isReadable,\n isWritable,\n isFileReadable,\n isDirectoryWritable,\n isDirectoryReadable,\n createDirectory,\n readFile,\n readStream,\n writeFile,\n forEachFileIn,\n hashFile,\n listFiles,\n deleteFile,\n deleteDirectory,\n getFileSize,\n 
};\n}"],"names":["path"],"mappings":";;;;;AAmCO,MAAM,MAAA,GAAS,CAAC,MAAA,KAAyE;AAG5F,EAAA,MAAM,GAAA,GAAM,MAAA,CAAO,GAAA,IAAO,OAAA,CAAQ,GAAA;AAElC,EAAA,MAAM,MAAA,GAAS,OAAOA,KAAAA,KAAmC;AACrD,IAAA,IAAI;AACA,MAAA,MAAM,EAAA,CAAG,QAAA,CAAS,IAAA,CAAKA,KAAI,CAAA;AAC3B,MAAA,OAAO,IAAA;AAAA,IAEX,SAAS,KAAA,EAAY;AACjB,MAAA,OAAO,KAAA;AAAA,IACX;AAAA,EACJ,CAAA;AAEA,EAAA,MAAM,WAAA,GAAc,OAAOA,KAAAA,KAAmC;AAC1D,IAAA,MAAM,KAAA,GAAQ,MAAM,EAAA,CAAG,QAAA,CAAS,KAAKA,KAAI,CAAA;AACzC,IAAA,IAAI,CAAC,KAAA,CAAM,WAAA,EAAY,EAAG;AACtB,MAAA,GAAA,CAAI,CAAA,EAAGA,KAAI,CAAA,mBAAA,CAAqB,CAAA;AAChC,MAAA,OAAO,KAAA;AAAA,IACX;AACA,IAAA,OAAO,IAAA;AAAA,EACX,CAAA;AAEA,EAAA,MAAM,MAAA,GAAS,OAAOA,KAAAA,KAAmC;AACrD,IAAA,MAAM,KAAA,GAAQ,MAAM,EAAA,CAAG,QAAA,CAAS,KAAKA,KAAI,CAAA;AACzC,IAAA,IAAI,CAAC,KAAA,CAAM,MAAA,EAAO,EAAG;AACjB,MAAA,GAAA,CAAI,CAAA,EAAGA,KAAI,CAAA,cAAA,CAAgB,CAAA;AAC3B,MAAA,OAAO,KAAA;AAAA,IACX;AACA,IAAA,OAAO,IAAA;AAAA,EACX,CAAA;AAEA,EAAA,MAAM,UAAA,GAAa,OAAOA,KAAAA,KAAmC;AACzD,IAAA,IAAI;AACA,MAAA,MAAM,GAAG,QAAA,CAAS,MAAA,CAAOA,KAAAA,EAAM,EAAA,CAAG,UAAU,IAAI,CAAA;AAAA,IACpD,SAAS,KAAA,EAAY;AACjB,MAAA,GAAA,CAAI,GAAGA,KAAI,CAAA,uBAAA,CAAA,EAA2B,KAAA,CAAM,OAAA,EAAS,MAAM,KAAK,CAAA;AAChE,MAAA,OAAO,KAAA;AAAA,IACX;AACA,IAAA,OAAO,IAAA;AAAA,EACX,CAAA;AAEA,EAAA,MAAM,UAAA,GAAa,OAAOA,KAAAA,KAAmC;AACzD,IAAA,IAAI;AACA,MAAA,MAAM,GAAG,QAAA,CAAS,MAAA,CAAOA,KAAAA,EAAM,EAAA,CAAG,UAAU,IAAI,CAAA;AAAA,IACpD,SAAS,KAAA,EAAY;AACjB,MAAA,GAAA,CAAI,GAAGA,KAAI,CAAA,uBAAA,CAAA,EAA2B,KAAA,CAAM,OAAA,EAAS,MAAM,KAAK,CAAA;AAChE,MAAA,OAAO,KAAA;AAAA,IACX;AACA,IAAA,OAAO,IAAA;AAAA,EACX,CAAA;AAEA,EAAA,MAAM,cAAA,GAAiB,OAAOA,KAAAA,KAAmC;AAC7D,IAAA,OAAO,MAAM,MAAA,CAAOA,KAAI,CAAA,IAAK,MAAM,OAAOA,KAAI,CAAA,IAAK,MAAM,UAAA,CAAWA,KAAI,CAAA;AAAA,EAC5E,CAAA;AAEA,EAAA,MAAM,mBAAA,GAAsB,OAAOA,KAAAA,KAAmC;AAClE,IAAA,OAAO,MAAM,MAAA,CAAOA,KAAI,CAAA,IAAK,MAAM,YAAYA,KAAI,CAAA,IAAK,MAAM,UAAA,CAAWA,KAAI,CAAA;AAAA,EACjF,CAAA;AAEA,EAAA,MAAM,mBAAA,GAAsB,OAAOA,KAAAA,KAAmC;AAClE,IAAA,OAAO,MAAM,MAAA,CAAOA,KAAI,CAAA,IAAK,MAAM,YAAYA,KAAI,CAAA,IAAK,MAAM,UAAA,CAAWA,KAAI,CAAA;AAAA,EACjF,CAAA;AAEA,EAAA,MAAM,eAAA,GAAkB,OAAOA,KAAAA,KAAgC;AAC3D,IAAA,IAAI;AACA,MAAA,MAAM,GAAG,QAAA,CAAS,KAAA,CAAMA,OAAM,EAAE,SAAA,EAAW,MAAM,CAAA;AAAA,IACrD,SAAS,UAAA,EAAiB;AACtB,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,kCAAA,EAAqCA,KAAI,CAAA,EAAA,EAAK,WAAW,OAAO,CAAA,CAAA,EAAI,UAAA,CAAW,KAAK,CAAA,CAAE,CAAA;AAAA,IAC1G;AAAA,EACJ,CAAA;AAEA,EAAA,MAAM,QAAA,GAAW,OAAOA,KAAAA,EAAc,QAAA,KAAsC;AACxE,IAAA,OAAO,MAAM,EAAA,CAAG,QAAA,CAAS,SAASA,KAAAA,EAAM,EAAE,UAAsC,CAAA;AAAA,EACpF,CAAA;AAEA,EAAA,MAAM,SAAA,GAAY,OAAOA,KAAAA,EAAc,IAAA,EAAuB,QAAA,KAAoC;AAC9F,IAAA,MAAM,GAAG,QAAA,CAAS,SAAA,CAAUA,OAAM,IAAA,EAAM,EAAE,UAAsC,CAAA;AAAA,EACpF,CAAA;AAEA,EAAA,MAAM,aAAA,GAAgB,OAAO,SAAA,EAAmB,QAAA,EAA2C,UAA0C,EAAE,OAAA,EAAS,OAAM,KAAqB;AACvK,IAAA,IAAI;AACA,MAAA,MAAM,KAAA,GAAQ,MAAM,IAAA,CAAK,OAAA,CAAQ,OAAA,EAAS,EAAE,GAAA,EAAK,SAAA,EAAW,KAAA,EAAO,IAAA,EAAM,CAAA;AACzE,MAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACtB,QAAA,MAAM,QAAA,CAASA,aAAA,CAAK,IAAA,CAAK,SAAA,EAAW,IAAI,CAAC,CAAA;AAAA,MAC7C;AAAA,IACJ,SAAS,GAAA,EAAU;AACf,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,uBAAA,EAA0B,OAAA,CAAQ,OAAO,OAAO,SAAS,CAAA,EAAA,EAAK,GAAA,CAAI,OAAO,CAAA,CAAE,CAAA;AAAA,IAC/F;AAAA,EACJ,CAAA;AAEA,EAAA,MAAM,UAAA,GAAa,OAAOA,KAAAA,KAAyC;AAC/D,IAAA,OAAO,EAAA,CAAG,iBAAiBA,KAAI,CAAA;AAAA,EACnC,CAAA;AAEA,EAAA,MAAM,QAAA,GAAW,OAAOA,KAAAA,EAAc,MAAA,KAAoC;AACtE,IAAA,MAAM,IAAA,GAAO,MAAM,QAAA,CAASA,KAAAA,EAAM,MAAM,CAAA;AACxC,IAAA,OAAO,MAAA,CAAO,UAAA,CAAW,QAAQ,CAAA,CAAE,MAAA,CAAO,IAAI,CAAA,CAAE,MAAA,CAAO,KAAK,CAAA,CAAE,KAAA,CAAM,CAAA,EAAG,MAAM,CAAA;AAAA,EACjF,CAAA;AAEA,EAAA,MAAM,SAAA,GAAY,OAAO,SAAA,KAAyC;AAC9D,IAAA,OAAO,MAAM,EAA
A,CAAG,QAAA,CAAS,OAAA,CAAQ,SAAS,CAAA;AAAA,EAC9C,CAAA;AAEA,EAAA,MAAM,UAAA,GAAa,OAAOA,KAAAA,KAAgC;AACtD,IAAA,MAAM,EAAA,CAAG,QAAA,CAAS,MAAA,CAAOA,KAAI,CAAA;AAAA,EACjC,CAAA;AAEA,EAAA,MAAM,eAAA,GAAkB,OAAOA,KAAAA,KAAgC;AAC3D,IAAA,MAAM,EAAA,CAAG,SAAS,EAAA,CAAGA,KAAAA,EAAM,EAAE,SAAA,EAAW,IAAA,EAAM,KAAA,EAAO,IAAA,EAAM,CAAA;AAAA,EAC/D,CAAA;AAEA,EAAA,MAAM,WAAA,GAAc,OAAOA,KAAAA,KAAkC;AACzD,IAAA,MAAM,KAAA,GAAQ,MAAM,EAAA,CAAG,QAAA,CAAS,KAAKA,KAAI,CAAA;AACzC,IAAA,OAAO,KAAA,CAAM,IAAA;AAAA,EACjB,CAAA;AAEA,EAAA,OAAO;AAAA,IACH,MAAA;AAAA,IACA,WAAA;AAAA,IACA,MAAA;AAAA,IACA,UAAA;AAAA,IACA,UAAA;AAAA,IACA,cAAA;AAAA,IACA,mBAAA;AAAA,IACA,mBAAA;AAAA,IACA,eAAA;AAAA,IACA,QAAA;AAAA,IACA,UAAA;AAAA,IACA,SAAA;AAAA,IACA,aAAA;AAAA,IACA,QAAA;AAAA,IACA,SAAA;AAAA,IACA,UAAA;AAAA,IACA,eAAA;AAAA,IACA;AAAA,GACJ;AACJ;;;;"}
package/dist/index13.js
ADDED
@@ -0,0 +1,124 @@
+import ffmpeg from 'fluent-ffmpeg';
+import path__default from 'node:path';
+import { create as create$1 } from './index12.js';
+
+const ffprobeAsync = (filePath) => {
+  return new Promise((resolve, reject) => {
+    ffmpeg.ffprobe(filePath, (err, metadata) => {
+      if (err) return reject(err);
+      resolve(metadata);
+    });
+  });
+};
+const create = (logger) => {
+  const storage$1 = create$1({ log: logger.debug });
+  const getAudioCreationTime = async (filePath) => {
+    try {
+      const metadata = await ffprobeAsync(filePath);
+      const formatTags = metadata?.format?.tags;
+      if (formatTags?.creation_time) {
+        logger.debug("Found creation_time in format tags: %s", formatTags.creation_time);
+        return new Date(formatTags.creation_time);
+      }
+      if (metadata?.streams?.length > 0) {
+        for (const stream of metadata.streams) {
+          if (stream.tags?.creation_time) {
+            logger.debug("Found creation_time in stream tags: %s", stream.tags.creation_time);
+            return new Date(stream.tags.creation_time);
+          }
+        }
+      }
+      logger.debug("No creation_time found in audio file metadata");
+      return null;
+    } catch (error) {
+      logger.error("Error extracting creation time from audio file: %s", error);
+      return null;
+    }
+  };
+  const getFileSize = async (filePath) => {
+    try {
+      return await storage$1.getFileSize(filePath);
+    } catch (error) {
+      logger.error("Error getting file size: %s", error);
+      throw new Error(`Failed to get file size for ${filePath}: ${error}`);
+    }
+  };
+  const splitAudioFile = async (filePath, outputDir, maxSizeBytes) => {
+    try {
+      const metadata = await ffprobeAsync(filePath);
+      const duration = parseFloat(metadata.format.duration);
+      const fileSize = await getFileSize(filePath);
+      const segmentCount = Math.ceil(fileSize / maxSizeBytes);
+      const segmentDuration = duration / segmentCount;
+      logger.debug(`Splitting ${filePath} (${fileSize} bytes) into ${segmentCount} segments of ~${segmentDuration} seconds each`);
+      await storage$1.createDirectory(outputDir);
+      const outputFiles = [];
+      const fileExt = path__default.extname(filePath);
+      const fileName = path__default.basename(filePath, fileExt);
+      const promises = [];
+      for (let i = 0; i < segmentCount; i++) {
+        const startTime = i * segmentDuration;
+        const outputPath = path__default.join(outputDir, `${fileName}_part${i + 1}${fileExt}`);
+        outputFiles.push(outputPath);
+        const promise = new Promise((resolve, reject) => {
+          ffmpeg(filePath).setStartTime(startTime).setDuration(segmentDuration).output(outputPath).on("end", () => {
+            logger.debug(`Created segment ${i + 1}/${segmentCount}: ${outputPath}`);
+            resolve();
+          }).on("error", (err) => {
+            logger.error(`Error creating segment ${i + 1}/${segmentCount}: ${err}`);
+            reject(err);
+          }).run();
+        });
+        promises.push(promise);
+      }
+      await Promise.all(promises);
+      return outputFiles;
+    } catch (error) {
+      logger.error("Error splitting audio file: %s", error);
+      throw new Error(`Failed to split audio file ${filePath}: ${error}`);
+    }
+  };
+  const convertToSupportedFormat = async (filePath, outputDir, forceConversion = false) => {
+    try {
+      const fileExt = path__default.extname(filePath).toLowerCase();
+      const supportedFormats = [".flac", ".m4a", ".mp3", ".mp4", ".mpeg", ".mpga", ".oga", ".ogg", ".wav", ".webm"];
+      if (supportedFormats.includes(fileExt) && !forceConversion) {
+        logger.debug(`File ${filePath} is already in a supported format: ${fileExt}`);
+        return filePath;
+      }
+      if (forceConversion && fileExt === ".mp3") {
+        logger.debug(`File ${filePath} is already MP3 (compressed format)`);
+        return filePath;
+      }
+      logger.info(`Converting ${fileExt} file to mp3 for transcription...`);
+      const fileName = path__default.basename(filePath, fileExt);
+      const outputPath = path__default.join(outputDir, `${fileName}.mp3`);
+      if (await storage$1.exists(outputPath)) {
+        logger.debug(`Converted file already exists: ${outputPath}`);
+        return outputPath;
+      }
+      await storage$1.createDirectory(outputDir);
+      return new Promise((resolve, reject) => {
+        ffmpeg(filePath).toFormat("mp3").audioBitrate("128k").output(outputPath).on("end", () => {
+          logger.info(`Successfully converted to: ${outputPath}`);
+          resolve(outputPath);
+        }).on("error", (err) => {
+          logger.error(`Error converting audio file: ${err}`);
+          reject(new Error(`Failed to convert ${filePath} to mp3: ${err.message}`));
+        }).run();
+      });
+    } catch (error) {
+      logger.error("Error in convertToSupportedFormat: %s", error);
+      throw new Error(`Failed to convert audio file ${filePath}: ${error}`);
+    }
+  };
+  return {
+    getAudioCreationTime,
+    getFileSize,
+    splitAudioFile,
+    convertToSupportedFormat
+  };
+};
+
+export { create };
+//# sourceMappingURL=index13.js.map
@@ -0,0 +1 @@
{"version":3,"file":"index13.js","sources":["../src/util/media.ts"],"sourcesContent":["import ffmpeg from 'fluent-ffmpeg';\nimport { Logger } from 'winston';\nimport path from 'node:path';\nimport * as Storage from '@/util/storage';\n\nexport interface Media {\n getAudioCreationTime: (filePath: string) => Promise<Date | null>;\n getFileSize: (filePath: string) => Promise<number>;\n splitAudioFile: (filePath: string, outputDir: string, maxSizeBytes: number) => Promise<string[]>;\n convertToSupportedFormat: (filePath: string, outputDir: string, forceConversion?: boolean) => Promise<string>;\n}\n\nconst ffprobeAsync = (filePath: string): Promise<any> => {\n return new Promise((resolve, reject) => {\n ffmpeg.ffprobe(filePath, (err, metadata) => {\n if (err) return reject(err);\n resolve(metadata);\n });\n });\n};\n\n\nexport const create = (logger: Logger): Media => {\n const storage = Storage.create({ log: logger.debug });\n\n // Extract creation time from audio file using ffmpeg\n const getAudioCreationTime = async (filePath: string): Promise<Date | null> => {\n try {\n const metadata = await ffprobeAsync(filePath);\n\n // Look for creation_time in format tags\n const formatTags = metadata?.format?.tags;\n if (formatTags?.creation_time) {\n logger.debug('Found creation_time in format tags: %s', formatTags.creation_time);\n return new Date(formatTags.creation_time);\n }\n\n // Check for creation_time in stream tags as fallback\n if (metadata?.streams?.length > 0) {\n for (const stream of metadata.streams) {\n if (stream.tags?.creation_time) {\n logger.debug('Found creation_time in stream tags: %s', stream.tags.creation_time);\n return new Date(stream.tags.creation_time);\n }\n }\n }\n\n logger.debug('No creation_time found in audio file metadata');\n return null;\n } catch (error) {\n logger.error('Error extracting creation time from audio file: %s', error);\n return null;\n }\n };\n\n // Get file size in bytes\n const getFileSize = async (filePath: string): Promise<number> => {\n try {\n return await storage.getFileSize(filePath);\n } catch (error) {\n logger.error('Error getting file size: %s', error);\n throw new Error(`Failed to get file size for ${filePath}: ${error}`);\n }\n };\n\n // Split large audio file into smaller chunks\n const splitAudioFile = async (filePath: string, outputDir: string, maxSizeBytes: number): Promise<string[]> => {\n try {\n const metadata = await ffprobeAsync(filePath);\n const duration = parseFloat(metadata.format.duration);\n\n // Calculate how many segments we need based on file size and max size\n const fileSize = await getFileSize(filePath);\n const segmentCount = Math.ceil(fileSize / maxSizeBytes);\n\n // Calculate segment duration\n const segmentDuration = duration / segmentCount;\n logger.debug(`Splitting ${filePath} (${fileSize} bytes) into ${segmentCount} segments of ~${segmentDuration} seconds each`);\n\n // Create output directory if it doesn't exist\n await storage.createDirectory(outputDir);\n\n const outputFiles: string[] = [];\n const fileExt = path.extname(filePath);\n const fileName = path.basename(filePath, fileExt);\n\n // Create a promise for each segment\n const promises = [];\n\n for (let i = 0; i < segmentCount; i++) {\n const startTime = i * segmentDuration;\n const outputPath = path.join(outputDir, `${fileName}_part${i + 1}${fileExt}`);\n outputFiles.push(outputPath);\n\n const promise = new Promise<void>((resolve, reject) => {\n ffmpeg(filePath)\n .setStartTime(startTime)\n .setDuration(segmentDuration)\n .output(outputPath)\n 
.on('end', () => {\n logger.debug(`Created segment ${i + 1}/${segmentCount}: ${outputPath}`);\n resolve();\n })\n .on('error', (err) => {\n logger.error(`Error creating segment ${i + 1}/${segmentCount}: ${err}`);\n reject(err);\n })\n .run();\n });\n\n promises.push(promise);\n }\n\n // Wait for all segments to be created\n await Promise.all(promises);\n return outputFiles;\n } catch (error) {\n logger.error('Error splitting audio file: %s', error);\n throw new Error(`Failed to split audio file ${filePath}: ${error}`);\n }\n };\n\n // Convert audio file to a format supported by OpenAI Whisper API\n // Supported formats: flac, m4a, mp3, mp4, mpeg, mpga, oga, ogg, wav, webm\n const convertToSupportedFormat = async (filePath: string, outputDir: string, forceConversion = false): Promise<string> => {\n try {\n const fileExt = path.extname(filePath).toLowerCase();\n\n // List of formats that OpenAI supports\n const supportedFormats = ['.flac', '.m4a', '.mp3', '.mp4', '.mpeg', '.mpga', '.oga', '.ogg', '.wav', '.webm'];\n\n // If already in a supported format and not forcing conversion, return as-is\n if (supportedFormats.includes(fileExt) && !forceConversion) {\n logger.debug(`File ${filePath} is already in a supported format: ${fileExt}`);\n return filePath;\n }\n\n // If forcing conversion and already MP3, return as-is (MP3 is already compressed)\n if (forceConversion && fileExt === '.mp3') {\n logger.debug(`File ${filePath} is already MP3 (compressed format)`);\n return filePath;\n }\n\n // Otherwise, convert to mp3 (widely supported and good compression)\n logger.info(`Converting ${fileExt} file to mp3 for transcription...`);\n const fileName = path.basename(filePath, fileExt);\n const outputPath = path.join(outputDir, `${fileName}.mp3`);\n\n // Check if converted file already exists\n if (await storage.exists(outputPath)) {\n logger.debug(`Converted file already exists: ${outputPath}`);\n return outputPath;\n }\n\n // Create output directory if it doesn't exist\n await storage.createDirectory(outputDir);\n\n return new Promise<string>((resolve, reject) => {\n ffmpeg(filePath)\n .toFormat('mp3')\n .audioBitrate('128k')\n .output(outputPath)\n .on('end', () => {\n logger.info(`Successfully converted to: ${outputPath}`);\n resolve(outputPath);\n })\n .on('error', (err) => {\n logger.error(`Error converting audio file: ${err}`);\n reject(new Error(`Failed to convert ${filePath} to mp3: ${err.message}`));\n })\n .run();\n });\n } catch (error) {\n logger.error('Error in convertToSupportedFormat: %s', error);\n throw new Error(`Failed to convert audio file ${filePath}: ${error}`);\n }\n };\n\n return {\n getAudioCreationTime,\n getFileSize,\n splitAudioFile,\n convertToSupportedFormat,\n 
}\n}\n"],"names":["storage","Storage.create","path"],"mappings":";;;;AAYA,MAAM,YAAA,GAAe,CAAC,QAAA,KAAmC;AACrD,EAAA,OAAO,IAAI,OAAA,CAAQ,CAAC,OAAA,EAAS,MAAA,KAAW;AACpC,IAAA,MAAA,CAAO,OAAA,CAAQ,QAAA,EAAU,CAAC,GAAA,EAAK,QAAA,KAAa;AACxC,MAAA,IAAI,GAAA,EAAK,OAAO,MAAA,CAAO,GAAG,CAAA;AAC1B,MAAA,OAAA,CAAQ,QAAQ,CAAA;AAAA,IACpB,CAAC,CAAA;AAAA,EACL,CAAC,CAAA;AACL,CAAA;AAGO,MAAM,MAAA,GAAS,CAAC,MAAA,KAA0B;AAC7C,EAAA,MAAMA,YAAUC,QAAQ,CAAO,EAAE,GAAA,EAAK,MAAA,CAAO,OAAO,CAAA;AAGpD,EAAA,MAAM,oBAAA,GAAuB,OAAO,QAAA,KAA2C;AAC3E,IAAA,IAAI;AACA,MAAA,MAAM,QAAA,GAAW,MAAM,YAAA,CAAa,QAAQ,CAAA;AAG5C,MAAA,MAAM,UAAA,GAAa,UAAU,MAAA,EAAQ,IAAA;AACrC,MAAA,IAAI,YAAY,aAAA,EAAe;AAC3B,QAAA,MAAA,CAAO,KAAA,CAAM,wCAAA,EAA0C,UAAA,CAAW,aAAa,CAAA;AAC/E,QAAA,OAAO,IAAI,IAAA,CAAK,UAAA,CAAW,aAAa,CAAA;AAAA,MAC5C;AAGA,MAAA,IAAI,QAAA,EAAU,OAAA,EAAS,MAAA,GAAS,CAAA,EAAG;AAC/B,QAAA,KAAA,MAAW,MAAA,IAAU,SAAS,OAAA,EAAS;AACnC,UAAA,IAAI,MAAA,CAAO,MAAM,aAAA,EAAe;AAC5B,YAAA,MAAA,CAAO,KAAA,CAAM,wCAAA,EAA0C,MAAA,CAAO,IAAA,CAAK,aAAa,CAAA;AAChF,YAAA,OAAO,IAAI,IAAA,CAAK,MAAA,CAAO,IAAA,CAAK,aAAa,CAAA;AAAA,UAC7C;AAAA,QACJ;AAAA,MACJ;AAEA,MAAA,MAAA,CAAO,MAAM,+CAA+C,CAAA;AAC5D,MAAA,OAAO,IAAA;AAAA,IACX,SAAS,KAAA,EAAO;AACZ,MAAA,MAAA,CAAO,KAAA,CAAM,sDAAsD,KAAK,CAAA;AACxE,MAAA,OAAO,IAAA;AAAA,IACX;AAAA,EACJ,CAAA;AAGA,EAAA,MAAM,WAAA,GAAc,OAAO,QAAA,KAAsC;AAC7D,IAAA,IAAI;AACA,MAAA,OAAO,MAAMD,SAAA,CAAQ,WAAA,CAAY,QAAQ,CAAA;AAAA,IAC7C,SAAS,KAAA,EAAO;AACZ,MAAA,MAAA,CAAO,KAAA,CAAM,+BAA+B,KAAK,CAAA;AACjD,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,4BAAA,EAA+B,QAAQ,CAAA,EAAA,EAAK,KAAK,CAAA,CAAE,CAAA;AAAA,IACvE;AAAA,EACJ,CAAA;AAGA,EAAA,MAAM,cAAA,GAAiB,OAAO,QAAA,EAAkB,SAAA,EAAmB,YAAA,KAA4C;AAC3G,IAAA,IAAI;AACA,MAAA,MAAM,QAAA,GAAW,MAAM,YAAA,CAAa,QAAQ,CAAA;AAC5C,MAAA,MAAM,QAAA,GAAW,UAAA,CAAW,QAAA,CAAS,MAAA,CAAO,QAAQ,CAAA;AAGpD,MAAA,MAAM,QAAA,GAAW,MAAM,WAAA,CAAY,QAAQ,CAAA;AAC3C,MAAA,MAAM,YAAA,GAAe,IAAA,CAAK,IAAA,CAAK,QAAA,GAAW,YAAY,CAAA;AAGtD,MAAA,MAAM,kBAAkB,QAAA,GAAW,YAAA;AACnC,MAAA,MAAA,CAAO,KAAA,CAAM,aAAa,QAAQ,CAAA,EAAA,EAAK,QAAQ,CAAA,aAAA,EAAgB,YAAY,CAAA,cAAA,EAAiB,eAAe,CAAA,aAAA,CAAe,CAAA;AAG1H,MAAA,MAAMA,SAAA,CAAQ,gBAAgB,SAAS,CAAA;AAEvC,MAAA,MAAM,cAAwB,EAAC;AAC/B,MAAA,MAAM,OAAA,GAAUE,aAAA,CAAK,OAAA,CAAQ,QAAQ,CAAA;AACrC,MAAA,MAAM,QAAA,GAAWA,aAAA,CAAK,QAAA,CAAS,QAAA,EAAU,OAAO,CAAA;AAGhD,MAAA,MAAM,WAAW,EAAC;AAElB,MAAA,KAAA,IAAS,CAAA,GAAI,CAAA,EAAG,CAAA,GAAI,YAAA,EAAc,CAAA,EAAA,EAAK;AACnC,QAAA,MAAM,YAAY,CAAA,GAAI,eAAA;AACtB,QAAA,MAAM,UAAA,GAAaA,aAAA,CAAK,IAAA,CAAK,SAAA,EAAW,CAAA,EAAG,QAAQ,CAAA,KAAA,EAAQ,CAAA,GAAI,CAAC,CAAA,EAAG,OAAO,CAAA,CAAE,CAAA;AAC5E,QAAA,WAAA,CAAY,KAAK,UAAU,CAAA;AAE3B,QAAA,MAAM,OAAA,GAAU,IAAI,OAAA,CAAc,CAAC,SAAS,MAAA,KAAW;AACnD,UAAA,MAAA,CAAO,QAAQ,CAAA,CACV,YAAA,CAAa,SAAS,CAAA,CACtB,WAAA,CAAY,eAAe,CAAA,CAC3B,MAAA,CAAO,UAAU,CAAA,CACjB,EAAA,CAAG,OAAO,MAAM;AACb,YAAA,MAAA,CAAO,KAAA,CAAM,mBAAmB,CAAA,GAAI,CAAC,IAAI,YAAY,CAAA,EAAA,EAAK,UAAU,CAAA,CAAE,CAAA;AACtE,YAAA,OAAA,EAAQ;AAAA,UACZ,CAAC,CAAA,CACA,EAAA,CAAG,OAAA,EAAS,CAAC,GAAA,KAAQ;AAClB,YAAA,MAAA,CAAO,KAAA,CAAM,0BAA0B,CAAA,GAAI,CAAC,IAAI,YAAY,CAAA,EAAA,EAAK,GAAG,CAAA,CAAE,CAAA;AACtE,YAAA,MAAA,CAAO,GAAG,CAAA;AAAA,UACd,CAAC,EACA,GAAA,EAAI;AAAA,QACb,CAAC,CAAA;AAED,QAAA,QAAA,CAAS,KAAK,OAAO,CAAA;AAAA,MACzB;AAGA,MAAA,MAAM,OAAA,CAAQ,IAAI,QAAQ,CAAA;AAC1B,MAAA,OAAO,WAAA;AAAA,IACX,SAAS,KAAA,EAAO;AACZ,MAAA,MAAA,CAAO,KAAA,CAAM,kCAAkC,KAAK,CAAA;AACpD,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,2BAAA,EAA8B,QAAQ,CAAA,EAAA,EAAK,KAAK,CAAA,CAAE,CAAA;AAAA,IACtE;AAAA,EACJ,CAAA;AAIA,EAAA,MAAM,wBAAA,GAA2B,OAAO,QAAA,EAAkB,SAAA,EAAmB,kBAAkB,KAAA,KAA2B;AACtH,IAAA,IAAI;AACA,MAAA,MAAM,OAAA,GAAUA,aAAA,CAAK,OAAA,CAAQ,QAAQ,EAAE,WAAA,EAAY;AAGnD,MAAA,MAAM,gBAAA,GAAmB
,CAAC,OAAA,EAAS,MAAA,EAAQ,MAAA,EAAQ,MAAA,EAAQ,OAAA,EAAS,OAAA,EAAS,MAAA,EAAQ,MAAA,EAAQ,MAAA,EAAQ,OAAO,CAAA;AAG5G,MAAA,IAAI,gBAAA,CAAiB,QAAA,CAAS,OAAO,CAAA,IAAK,CAAC,eAAA,EAAiB;AACxD,QAAA,MAAA,CAAO,KAAA,CAAM,CAAA,KAAA,EAAQ,QAAQ,CAAA,mCAAA,EAAsC,OAAO,CAAA,CAAE,CAAA;AAC5E,QAAA,OAAO,QAAA;AAAA,MACX;AAGA,MAAA,IAAI,eAAA,IAAmB,YAAY,MAAA,EAAQ;AACvC,QAAA,MAAA,CAAO,KAAA,CAAM,CAAA,KAAA,EAAQ,QAAQ,CAAA,mCAAA,CAAqC,CAAA;AAClE,QAAA,OAAO,QAAA;AAAA,MACX;AAGA,MAAA,MAAA,CAAO,IAAA,CAAK,CAAA,WAAA,EAAc,OAAO,CAAA,iCAAA,CAAmC,CAAA;AACpE,MAAA,MAAM,QAAA,GAAWA,aAAA,CAAK,QAAA,CAAS,QAAA,EAAU,OAAO,CAAA;AAChD,MAAA,MAAM,aAAaA,aAAA,CAAK,IAAA,CAAK,SAAA,EAAW,CAAA,EAAG,QAAQ,CAAA,IAAA,CAAM,CAAA;AAGzD,MAAA,IAAI,MAAMF,SAAA,CAAQ,MAAA,CAAO,UAAU,CAAA,EAAG;AAClC,QAAA,MAAA,CAAO,KAAA,CAAM,CAAA,+BAAA,EAAkC,UAAU,CAAA,CAAE,CAAA;AAC3D,QAAA,OAAO,UAAA;AAAA,MACX;AAGA,MAAA,MAAMA,SAAA,CAAQ,gBAAgB,SAAS,CAAA;AAEvC,MAAA,OAAO,IAAI,OAAA,CAAgB,CAAC,OAAA,EAAS,MAAA,KAAW;AAC5C,QAAA,MAAA,CAAO,QAAQ,CAAA,CACV,QAAA,CAAS,KAAK,CAAA,CACd,YAAA,CAAa,MAAM,CAAA,CACnB,MAAA,CAAO,UAAU,CAAA,CACjB,EAAA,CAAG,OAAO,MAAM;AACb,UAAA,MAAA,CAAO,IAAA,CAAK,CAAA,2BAAA,EAA8B,UAAU,CAAA,CAAE,CAAA;AACtD,UAAA,OAAA,CAAQ,UAAU,CAAA;AAAA,QACtB,CAAC,CAAA,CACA,EAAA,CAAG,OAAA,EAAS,CAAC,GAAA,KAAQ;AAClB,UAAA,MAAA,CAAO,KAAA,CAAM,CAAA,6BAAA,EAAgC,GAAG,CAAA,CAAE,CAAA;AAClD,UAAA,MAAA,CAAO,IAAI,MAAM,CAAA,kBAAA,EAAqB,QAAQ,YAAY,GAAA,CAAI,OAAO,EAAE,CAAC,CAAA;AAAA,QAC5E,CAAC,EACA,GAAA,EAAI;AAAA,MACb,CAAC,CAAA;AAAA,IACL,SAAS,KAAA,EAAO;AACZ,MAAA,MAAA,CAAO,KAAA,CAAM,yCAAyC,KAAK,CAAA;AAC3D,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,6BAAA,EAAgC,QAAQ,CAAA,EAAA,EAAK,KAAK,CAAA,CAAE,CAAA;AAAA,IACxE;AAAA,EACJ,CAAA;AAEA,EAAA,OAAO;AAAA,IACH,oBAAA;AAAA,IACA,WAAA;AAAA,IACA,cAAA;AAAA,IACA;AAAA,GACJ;AACJ;;;;"}
package/dist/index14.js
ADDED
@@ -0,0 +1,296 @@
+const formatMetadataMarkdown = (metadata) => {
+  const lines = [];
+  if (metadata.title) {
+    lines.push(`# ${metadata.title}`);
+    lines.push("");
+  }
+  lines.push("## Metadata");
+  lines.push("");
+  if (metadata.date) {
+    const dateStr = metadata.date.toLocaleDateString("en-US", {
+      year: "numeric",
+      month: "long",
+      day: "numeric"
+    });
+    lines.push(`**Date**: ${dateStr}`);
+    if (metadata.recordingTime) {
+      lines.push(`**Time**: ${metadata.recordingTime}`);
+    } else {
+      const timeStr = metadata.date.toLocaleTimeString("en-US", {
+        hour: "2-digit",
+        minute: "2-digit",
+        hour12: true
+      });
+      lines.push(`**Time**: ${timeStr}`);
+    }
+  }
+  lines.push("");
+  if (metadata.project) {
+    lines.push(`**Project**: ${metadata.project}`);
+    if (metadata.projectId) {
+      lines.push(`**Project ID**: \`${metadata.projectId}\``);
+    }
+    lines.push("");
+  }
+  if (metadata.routing) {
+    lines.push("### Routing");
+    lines.push("");
+    lines.push(`**Destination**: ${metadata.routing.destination}`);
+    lines.push(`**Confidence**: ${(metadata.routing.confidence * 100).toFixed(1)}%`);
+    lines.push("");
+    if (metadata.routing.signals.length > 0) {
+      lines.push("**Classification Signals**:");
+      for (const signal of metadata.routing.signals) {
+        const signalType = signal.type.replace(/_/g, " ");
+        const weight = (signal.weight * 100).toFixed(0);
+        lines.push(`- ${signalType}: "${signal.value}" (${weight}% weight)`);
+      }
+      lines.push("");
+    }
+    if (metadata.routing.reasoning) {
+      lines.push(`**Reasoning**: ${metadata.routing.reasoning}`);
+      lines.push("");
+    }
+  }
+  if (metadata.tags && metadata.tags.length > 0) {
+    lines.push("**Tags**: " + metadata.tags.map((tag) => `\`${tag}\``).join(", "));
+    lines.push("");
+  }
+  if (metadata.duration) {
+    lines.push(`**Duration**: ${metadata.duration}`);
+    lines.push("");
+  }
+  lines.push("---");
+  lines.push("");
+  return lines.join("\n");
+};
+const formatEntityMetadataMarkdown = (metadata) => {
+  if (!metadata.entities) {
+    return "";
+  }
+  const lines = [];
+  lines.push("");
+  lines.push("---");
+  lines.push("");
+  lines.push("## Entity References");
+  lines.push("");
+  lines.push("<!-- Machine-readable entity metadata for indexing and querying -->");
+  lines.push("");
+  if (metadata.entities.people && metadata.entities.people.length > 0) {
+    lines.push("### People");
+    lines.push("");
+    for (const person of metadata.entities.people) {
+      lines.push(`- \`${person.id}\`: ${person.name}`);
+    }
+    lines.push("");
+  }
+  if (metadata.entities.projects && metadata.entities.projects.length > 0) {
+    lines.push("### Projects");
+    lines.push("");
+    for (const project of metadata.entities.projects) {
+      lines.push(`- \`${project.id}\`: ${project.name}`);
+    }
+    lines.push("");
+  }
+  if (metadata.entities.terms && metadata.entities.terms.length > 0) {
+    lines.push("### Terms");
+    lines.push("");
+    for (const term of metadata.entities.terms) {
+      lines.push(`- \`${term.id}\`: ${term.name}`);
+    }
+    lines.push("");
+  }
+  if (metadata.entities.companies && metadata.entities.companies.length > 0) {
+    lines.push("### Companies");
+    lines.push("");
+    for (const company of metadata.entities.companies) {
+      lines.push(`- \`${company.id}\`: ${company.name}`);
+    }
+    lines.push("");
+  }
+  return lines.join("\n");
+};
+const parseEntityMetadata = (content) => {
+  const headerIndex = content.indexOf("## Entity References");
+  if (headerIndex === -1) {
+    return void 0;
+  }
+  let contentStart = headerIndex + "## Entity References".length;
+  while (contentStart < content.length && (content[contentStart] === "\n" || content[contentStart] === "\r" || content[contentStart] === " " || content[contentStart] === " ")) {
+    contentStart++;
+  }
+  const remainingContent = content.substring(contentStart);
+  const nextHeaderMatch = remainingContent.match(/\n## /);
+  const sectionContent = nextHeaderMatch ? remainingContent.substring(0, nextHeaderMatch.index) : remainingContent;
+  const entities = {
+    people: [],
+    projects: [],
+    terms: [],
+    companies: []
+  };
+  const parseEntities = (type) => {
+    const typeMap = {
+      "People": "person",
+      "Projects": "project",
+      "Terms": "term",
+      "Companies": "company"
+    };
+    const entityType = typeMap[type];
+    const sectionHeader = `### ${type}`;
+    const sectionStart = sectionContent.indexOf(sectionHeader);
+    if (sectionStart === -1) return [];
+    const headerEnd = sectionStart + sectionHeader.length;
+    let sectionTextStart = headerEnd;
+    while (sectionTextStart < sectionContent.length && (sectionContent[sectionTextStart] === "\n" || sectionContent[sectionTextStart] === "\r" || sectionContent[sectionTextStart] === " ")) {
+      sectionTextStart++;
+    }
+    const afterSection = sectionContent.substring(sectionTextStart);
+    const nextSection = afterSection.search(/\n###/);
+    const sectionText = nextSection === -1 ? afterSection : afterSection.substring(0, nextSection);
+    const items = [];
+    const lines = sectionText.split("\n");
+    for (const line of lines) {
+      const trimmed = line.trim();
+      const match = trimmed.match(/^- `([^`]+)`:\s*(.+)$/);
+      if (match) {
+        items.push({
+          id: match[1],
+          name: match[2].trim(),
+          type: entityType
+        });
+      }
+    }
+    return items;
+  };
+  entities.people = parseEntities("People");
+  entities.projects = parseEntities("Projects");
+  entities.terms = parseEntities("Terms");
+  entities.companies = parseEntities("Companies");
+  const hasEntities = entities.people.length > 0 || entities.projects.length > 0 || entities.terms.length > 0 || entities.companies.length > 0;
+  return hasEntities ? entities : void 0;
+};
+const createRoutingMetadata = (decision) => {
+  return {
+    destination: decision.destination.path,
+    confidence: decision.confidence,
+    signals: decision.signals,
+    reasoning: decision.reasoning
+  };
+};
+const formatDuration = (seconds) => {
+  const minutes = Math.floor(seconds / 60);
+  const secs = Math.round(seconds % 60);
+  if (minutes === 0) {
+    return `${secs}s`;
+  }
+  if (secs === 0) {
+    return `${minutes}m`;
+  }
+  return `${minutes}m ${secs}s`;
+};
+const formatTime = (date) => {
+  return date.toLocaleTimeString("en-US", {
+    hour: "2-digit",
+    minute: "2-digit",
+    hour12: true
+  });
+};
+const extractTopicFromSignals = (signals) => {
+  const topicSignal = signals.find((s) => s.type === "topic" || s.type === "context_type");
+  return topicSignal?.value;
+};
+const extractTagsFromSignals = (signals) => {
+  const tags = signals.filter((s) => s.type !== "context_type").map((s) => s.value).filter((v) => typeof v === "string");
+  return Array.from(new Set(tags));
+};
+const VALID_STATUSES = [
+  "initial",
+  "enhanced",
+  "reviewed",
+  "in_progress",
+  "closed",
+  "archived"
+];
+const isValidStatus = (status) => {
+  return VALID_STATUSES.includes(status);
+};
+const generateTaskId = () => {
+  const timestamp = Date.now();
+  const random = Math.random().toString(36).substring(2, 8);
+  return `task-${timestamp}-${random}`;
+};
+const createTask = (description) => {
+  return {
+    id: generateTaskId(),
+    description,
+    status: "open",
+    created: (/* @__PURE__ */ new Date()).toISOString()
+  };
+};
+const updateStatus = (metadata, newStatus) => {
+  const oldStatus = metadata.status;
+  if (oldStatus === newStatus) {
+    return metadata;
+  }
+  const transition = {
+    from: oldStatus || "reviewed",
+    to: newStatus,
+    at: (/* @__PURE__ */ new Date()).toISOString()
+  };
+  return {
+    ...metadata,
+    status: newStatus,
+    history: [...metadata.history || [], transition]
+  };
+};
+const applyLifecycleDefaults = (metadata) => {
+  return {
+    ...metadata,
+    status: metadata.status ?? "reviewed",
+    history: metadata.history ?? [],
+    tasks: metadata.tasks ?? []
+  };
+};
+const completeTask = (metadata, taskId) => {
+  const tasks = metadata.tasks || [];
+  const taskIndex = tasks.findIndex((t) => t.id === taskId);
+  if (taskIndex === -1) {
+    throw new Error(`Task not found: ${taskId}`);
+  }
+  const now = (/* @__PURE__ */ new Date()).toISOString();
+  const updatedTasks = [...tasks];
+  updatedTasks[taskIndex] = {
+    ...updatedTasks[taskIndex],
+    status: "done",
+    changed: now,
+    completed: now
+  };
+  return {
+    ...metadata,
+    tasks: updatedTasks
+  };
+};
+const deleteTask = (metadata, taskId) => {
+  const tasks = metadata.tasks || [];
+  const taskIndex = tasks.findIndex((t) => t.id === taskId);
+  if (taskIndex === -1) {
+    throw new Error(`Task not found: ${taskId}`);
+  }
+  return {
+    ...metadata,
+    tasks: tasks.filter((t) => t.id !== taskId)
+  };
+};
+const addTask = (metadata, description) => {
+  const task = createTask(description);
+  return {
+    metadata: {
+      ...metadata,
+      tasks: [...metadata.tasks || [], task]
+    },
+    task
+  };
+};
+
+export { VALID_STATUSES, addTask, applyLifecycleDefaults, completeTask, createRoutingMetadata, createTask, deleteTask, extractTagsFromSignals, extractTopicFromSignals, formatDuration, formatEntityMetadataMarkdown, formatMetadataMarkdown, formatTime, generateTaskId, isValidStatus, parseEntityMetadata, updateStatus };
+//# sourceMappingURL=index14.js.map