@redaksjon/protokoll 0.0.8 → 0.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.cursor/rules/definition-of-done.md +89 -0
- package/.cursor/rules/no-emoticons.md +43 -0
- package/LICENSE +1 -1
- package/README.md +928 -35
- package/dist/agentic/executor.js +315 -0
- package/dist/agentic/executor.js.map +1 -0
- package/dist/agentic/index.js +19 -0
- package/dist/agentic/index.js.map +1 -0
- package/dist/agentic/registry.js +41 -0
- package/dist/agentic/registry.js.map +1 -0
- package/dist/agentic/tools/lookup-person.js +66 -0
- package/dist/agentic/tools/lookup-person.js.map +1 -0
- package/dist/agentic/tools/lookup-project.js +93 -0
- package/dist/agentic/tools/lookup-project.js.map +1 -0
- package/dist/agentic/tools/route-note.js +45 -0
- package/dist/agentic/tools/route-note.js.map +1 -0
- package/dist/agentic/tools/store-context.js +51 -0
- package/dist/agentic/tools/store-context.js.map +1 -0
- package/dist/agentic/tools/verify-spelling.js +57 -0
- package/dist/agentic/tools/verify-spelling.js.map +1 -0
- package/dist/arguments.js +23 -6
- package/dist/arguments.js.map +1 -1
- package/dist/constants.js +13 -11
- package/dist/constants.js.map +1 -1
- package/dist/context/discovery.js +114 -0
- package/dist/context/discovery.js.map +1 -0
- package/dist/context/index.js +58 -0
- package/dist/context/index.js.map +1 -0
- package/dist/context/storage.js +131 -0
- package/dist/context/storage.js.map +1 -0
- package/dist/interactive/handler.js +223 -0
- package/dist/interactive/handler.js.map +1 -0
- package/dist/interactive/index.js +18 -0
- package/dist/interactive/index.js.map +1 -0
- package/dist/interactive/onboarding.js +28 -0
- package/dist/interactive/onboarding.js.map +1 -0
- package/dist/main.js +0 -0
- package/dist/output/index.js +8 -0
- package/dist/output/index.js.map +1 -0
- package/dist/output/manager.js +105 -0
- package/dist/output/manager.js.map +1 -0
- package/dist/phases/complete.js +107 -0
- package/dist/phases/complete.js.map +1 -0
- package/dist/phases/locate.js +14 -5
- package/dist/phases/locate.js.map +1 -1
- package/dist/pipeline/index.js +8 -0
- package/dist/pipeline/index.js.map +1 -0
- package/dist/pipeline/orchestrator.js +281 -0
- package/dist/pipeline/orchestrator.js.map +1 -0
- package/dist/prompt/instructions/transcribe.md +6 -6
- package/dist/prompt/personas/transcriber.md +5 -5
- package/dist/protokoll.js +38 -5
- package/dist/protokoll.js.map +1 -1
- package/dist/reasoning/client.js +150 -0
- package/dist/reasoning/client.js.map +1 -0
- package/dist/reasoning/index.js +36 -0
- package/dist/reasoning/index.js.map +1 -0
- package/dist/reasoning/strategy.js +60 -0
- package/dist/reasoning/strategy.js.map +1 -0
- package/dist/reflection/collector.js +124 -0
- package/dist/reflection/collector.js.map +1 -0
- package/dist/reflection/index.js +16 -0
- package/dist/reflection/index.js.map +1 -0
- package/dist/reflection/reporter.js +238 -0
- package/dist/reflection/reporter.js.map +1 -0
- package/dist/routing/classifier.js +201 -0
- package/dist/routing/classifier.js.map +1 -0
- package/dist/routing/index.js +27 -0
- package/dist/routing/index.js.map +1 -0
- package/dist/routing/router.js +153 -0
- package/dist/routing/router.js.map +1 -0
- package/dist/transcription/index.js +41 -0
- package/dist/transcription/index.js.map +1 -0
- package/dist/transcription/service.js +64 -0
- package/dist/transcription/service.js.map +1 -0
- package/dist/transcription/types.js +31 -0
- package/dist/transcription/types.js.map +1 -0
- package/dist/util/media.js +4 -4
- package/dist/util/media.js.map +1 -1
- package/dist/util/metadata.js +95 -0
- package/dist/util/metadata.js.map +1 -0
- package/dist/util/storage.js +2 -2
- package/dist/util/storage.js.map +1 -1
- package/docs/examples.md +224 -0
- package/docs/index.html +5 -3
- package/docs/package-lock.json +639 -332
- package/docs/package.json +5 -4
- package/docs/troubleshooting.md +257 -0
- package/docs/vite.config.js +9 -3
- package/eslint.config.mjs +1 -0
- package/guide/architecture.md +217 -0
- package/guide/configuration.md +199 -0
- package/guide/context-system.md +215 -0
- package/guide/development.md +273 -0
- package/guide/index.md +91 -0
- package/guide/interactive.md +199 -0
- package/guide/quickstart.md +138 -0
- package/guide/reasoning.md +193 -0
- package/guide/routing.md +222 -0
- package/package.json +10 -7
- package/tsconfig.tsbuildinfo +1 -1
- package/vitest.config.ts +27 -5
- package/dist/phases/transcribe.js +0 -149
- package/dist/phases/transcribe.js.map +0 -1
- package/dist/processor.js +0 -35
- package/dist/processor.js.map +0 -1
- package/dist/prompt/transcribe.js +0 -41
- package/dist/prompt/transcribe.js.map +0 -1
- package/dist/util/general.js +0 -39
- package/dist/util/general.js.map +0 -1
- package/dist/util/openai.js +0 -92
- package/dist/util/openai.js.map +0 -1
|
@@ -0,0 +1,201 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Intelligent Classifier
|
|
3
|
+
*
|
|
4
|
+
* Multi-signal classification system for routing transcripts to projects.
|
|
5
|
+
* Uses various signals (explicit phrases, associated people/companies, topics)
|
|
6
|
+
* to determine the best project match with confidence scoring.
|
|
7
|
+
*
|
|
8
|
+
* Design Note: This module is designed to be self-contained and may be
|
|
9
|
+
* extracted for use in other tools (kronologi, observasjon) in the future.
|
|
10
|
+
*/ const create = (contextInstance)=>{
|
|
11
|
+
const classify = (routingContext, routes)=>{
|
|
12
|
+
const results = [];
|
|
13
|
+
const normalizedText = routingContext.transcriptText.toLowerCase();
|
|
14
|
+
for (const route of routes){
|
|
15
|
+
var _classification_explicit_phrases, _routingContext_detectedPeople, _classification_associated_people, _routingContext_detectedCompanies, _classification_associated_companies, _classification_topics;
|
|
16
|
+
if (route.active === false) continue;
|
|
17
|
+
const signals = [];
|
|
18
|
+
const classification = route.classification;
|
|
19
|
+
// 1. Check explicit phrases (highest weight)
|
|
20
|
+
for (const phrase of (_classification_explicit_phrases = classification.explicit_phrases) !== null && _classification_explicit_phrases !== void 0 ? _classification_explicit_phrases : []){
|
|
21
|
+
if (normalizedText.includes(phrase.toLowerCase())) {
|
|
22
|
+
signals.push({
|
|
23
|
+
type: 'explicit_phrase',
|
|
24
|
+
value: phrase,
|
|
25
|
+
weight: 0.9
|
|
26
|
+
});
|
|
27
|
+
}
|
|
28
|
+
}
|
|
29
|
+
// 2. Check associated people
|
|
30
|
+
const peopleInText = (_routingContext_detectedPeople = routingContext.detectedPeople) !== null && _routingContext_detectedPeople !== void 0 ? _routingContext_detectedPeople : detectPeopleFromContext(normalizedText, contextInstance);
|
|
31
|
+
for (const personId of (_classification_associated_people = classification.associated_people) !== null && _classification_associated_people !== void 0 ? _classification_associated_people : []){
|
|
32
|
+
if (peopleInText.includes(personId)) {
|
|
33
|
+
var _ref;
|
|
34
|
+
const person = contextInstance.getPerson(personId);
|
|
35
|
+
signals.push({
|
|
36
|
+
type: 'associated_person',
|
|
37
|
+
value: (_ref = person === null || person === void 0 ? void 0 : person.name) !== null && _ref !== void 0 ? _ref : personId,
|
|
38
|
+
weight: 0.6
|
|
39
|
+
});
|
|
40
|
+
}
|
|
41
|
+
}
|
|
42
|
+
// 3. Check associated companies
|
|
43
|
+
const companiesInText = (_routingContext_detectedCompanies = routingContext.detectedCompanies) !== null && _routingContext_detectedCompanies !== void 0 ? _routingContext_detectedCompanies : detectCompaniesFromContext(normalizedText, contextInstance);
|
|
44
|
+
for (const companyId of (_classification_associated_companies = classification.associated_companies) !== null && _classification_associated_companies !== void 0 ? _classification_associated_companies : []){
|
|
45
|
+
if (companiesInText.includes(companyId)) {
|
|
46
|
+
var _ref1;
|
|
47
|
+
const company = contextInstance.getCompany(companyId);
|
|
48
|
+
signals.push({
|
|
49
|
+
type: 'associated_company',
|
|
50
|
+
value: (_ref1 = company === null || company === void 0 ? void 0 : company.name) !== null && _ref1 !== void 0 ? _ref1 : companyId,
|
|
51
|
+
weight: 0.5
|
|
52
|
+
});
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
// 4. Check topics
|
|
56
|
+
for (const topic of (_classification_topics = classification.topics) !== null && _classification_topics !== void 0 ? _classification_topics : []){
|
|
57
|
+
if (normalizedText.includes(topic.toLowerCase())) {
|
|
58
|
+
signals.push({
|
|
59
|
+
type: 'topic',
|
|
60
|
+
value: topic,
|
|
61
|
+
weight: 0.3
|
|
62
|
+
});
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
// 5. Context type (if we can infer work vs personal)
|
|
66
|
+
// This is a weaker signal but helps with disambiguation
|
|
67
|
+
const inferredContextType = inferContextType(normalizedText);
|
|
68
|
+
if (inferredContextType === classification.context_type) {
|
|
69
|
+
signals.push({
|
|
70
|
+
type: 'context_type',
|
|
71
|
+
value: classification.context_type,
|
|
72
|
+
weight: 0.2
|
|
73
|
+
});
|
|
74
|
+
}
|
|
75
|
+
// Only include if we have at least one signal
|
|
76
|
+
if (signals.length > 0) {
|
|
77
|
+
const confidence = calculateConfidence(signals);
|
|
78
|
+
results.push({
|
|
79
|
+
projectId: route.projectId,
|
|
80
|
+
confidence,
|
|
81
|
+
signals,
|
|
82
|
+
reasoning: buildReasoning(signals)
|
|
83
|
+
});
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
// Sort by confidence descending
|
|
87
|
+
return results.sort((a, b)=>b.confidence - a.confidence);
|
|
88
|
+
};
|
|
89
|
+
const calculateConfidence = (signals)=>{
|
|
90
|
+
if (signals.length === 0) return 0;
|
|
91
|
+
// Weighted average with diminishing returns for multiple signals
|
|
92
|
+
let totalWeight = 0;
|
|
93
|
+
let weightedSum = 0;
|
|
94
|
+
for(let i = 0; i < signals.length; i++){
|
|
95
|
+
const signal = signals[i];
|
|
96
|
+
// Later signals contribute less (diminishing returns)
|
|
97
|
+
const positionFactor = 1 / (1 + i * 0.3);
|
|
98
|
+
const effectiveWeight = signal.weight * positionFactor;
|
|
99
|
+
weightedSum += effectiveWeight;
|
|
100
|
+
totalWeight += positionFactor;
|
|
101
|
+
}
|
|
102
|
+
// Normalize and cap at 0.99
|
|
103
|
+
return Math.min(weightedSum / Math.max(totalWeight, 1), 0.99);
|
|
104
|
+
};
|
|
105
|
+
const buildReasoning = (signals)=>{
|
|
106
|
+
const parts = signals.map((s)=>{
|
|
107
|
+
switch(s.type){
|
|
108
|
+
case 'explicit_phrase':
|
|
109
|
+
return `explicit phrase: "${s.value}"`;
|
|
110
|
+
case 'associated_person':
|
|
111
|
+
return `mentioned ${s.value} (associated)`;
|
|
112
|
+
case 'associated_company':
|
|
113
|
+
return `mentioned ${s.value} (associated company)`;
|
|
114
|
+
case 'topic':
|
|
115
|
+
return `topic: ${s.value}`;
|
|
116
|
+
case 'context_type':
|
|
117
|
+
return `context: ${s.value}`;
|
|
118
|
+
}
|
|
119
|
+
});
|
|
120
|
+
return parts.join(', ');
|
|
121
|
+
};
|
|
122
|
+
return {
|
|
123
|
+
classify,
|
|
124
|
+
calculateConfidence
|
|
125
|
+
};
|
|
126
|
+
};
|
|
127
|
+
// Helper functions
|
|
128
|
+
function detectPeopleFromContext(text, context) {
|
|
129
|
+
const found = [];
|
|
130
|
+
for (const person of context.getAllPeople()){
|
|
131
|
+
var _person_sounds_like;
|
|
132
|
+
const nameNormalized = person.name.toLowerCase();
|
|
133
|
+
if (text.includes(nameNormalized)) {
|
|
134
|
+
found.push(person.id);
|
|
135
|
+
continue;
|
|
136
|
+
}
|
|
137
|
+
// Check phonetic variants (sounds_like)
|
|
138
|
+
for (const variant of (_person_sounds_like = person.sounds_like) !== null && _person_sounds_like !== void 0 ? _person_sounds_like : []){
|
|
139
|
+
if (text.includes(variant.toLowerCase())) {
|
|
140
|
+
found.push(person.id);
|
|
141
|
+
break;
|
|
142
|
+
}
|
|
143
|
+
}
|
|
144
|
+
}
|
|
145
|
+
return found;
|
|
146
|
+
}
|
|
147
|
+
function detectCompaniesFromContext(text, context) {
|
|
148
|
+
const found = [];
|
|
149
|
+
for (const company of context.getAllCompanies()){
|
|
150
|
+
var _company_sounds_like;
|
|
151
|
+
const nameNormalized = company.name.toLowerCase();
|
|
152
|
+
if (text.includes(nameNormalized)) {
|
|
153
|
+
found.push(company.id);
|
|
154
|
+
continue;
|
|
155
|
+
}
|
|
156
|
+
// Check full name
|
|
157
|
+
if (company.fullName && text.includes(company.fullName.toLowerCase())) {
|
|
158
|
+
found.push(company.id);
|
|
159
|
+
continue;
|
|
160
|
+
}
|
|
161
|
+
// Check phonetic variants (sounds_like)
|
|
162
|
+
for (const variant of (_company_sounds_like = company.sounds_like) !== null && _company_sounds_like !== void 0 ? _company_sounds_like : []){
|
|
163
|
+
if (text.includes(variant.toLowerCase())) {
|
|
164
|
+
found.push(company.id);
|
|
165
|
+
break;
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
}
|
|
169
|
+
return found;
|
|
170
|
+
}
|
|
171
|
+
function inferContextType(text) {
|
|
172
|
+
const workIndicators = [
|
|
173
|
+
'meeting',
|
|
174
|
+
'project',
|
|
175
|
+
'deadline',
|
|
176
|
+
'team',
|
|
177
|
+
'client',
|
|
178
|
+
'report'
|
|
179
|
+
];
|
|
180
|
+
const personalIndicators = [
|
|
181
|
+
'family',
|
|
182
|
+
'weekend',
|
|
183
|
+
'vacation',
|
|
184
|
+
'hobby',
|
|
185
|
+
'friend'
|
|
186
|
+
];
|
|
187
|
+
let workScore = 0;
|
|
188
|
+
let personalScore = 0;
|
|
189
|
+
for (const word of workIndicators){
|
|
190
|
+
if (text.includes(word)) workScore++;
|
|
191
|
+
}
|
|
192
|
+
for (const word of personalIndicators){
|
|
193
|
+
if (text.includes(word)) personalScore++;
|
|
194
|
+
}
|
|
195
|
+
if (workScore > personalScore + 1) return 'work';
|
|
196
|
+
if (personalScore > workScore + 1) return 'personal';
|
|
197
|
+
return 'mixed';
|
|
198
|
+
}
|
|
199
|
+
|
|
200
|
+
export { create };
|
|
201
|
+
//# sourceMappingURL=classifier.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"classifier.js","sources":["../../src/routing/classifier.ts"],"sourcesContent":["/**\n * Intelligent Classifier\n * \n * Multi-signal classification system for routing transcripts to projects.\n * Uses various signals (explicit phrases, associated people/companies, topics)\n * to determine the best project match with confidence scoring.\n * \n * Design Note: This module is designed to be self-contained and may be\n * extracted for use in other tools (kronologi, observasjon) in the future.\n */\n\nimport { \n ClassificationResult, \n ClassificationSignal, \n ProjectRoute, \n RoutingContext \n} from './types';\nimport * as Context from '../context';\n\nexport interface ClassifierInstance {\n classify(context: RoutingContext, routes: ProjectRoute[]): ClassificationResult[];\n calculateConfidence(signals: ClassificationSignal[]): number;\n}\n\nexport const create = (contextInstance: Context.ContextInstance): ClassifierInstance => {\n \n const classify = (\n routingContext: RoutingContext, \n routes: ProjectRoute[]\n ): ClassificationResult[] => {\n const results: ClassificationResult[] = [];\n const normalizedText = routingContext.transcriptText.toLowerCase();\n \n for (const route of routes) {\n if (route.active === false) continue;\n \n const signals: ClassificationSignal[] = [];\n const classification = route.classification;\n \n // 1. Check explicit phrases (highest weight)\n for (const phrase of classification.explicit_phrases ?? []) {\n if (normalizedText.includes(phrase.toLowerCase())) {\n signals.push({\n type: 'explicit_phrase',\n value: phrase,\n weight: 0.9, // High confidence\n });\n }\n }\n \n // 2. Check associated people\n const peopleInText = routingContext.detectedPeople ?? \n detectPeopleFromContext(normalizedText, contextInstance);\n \n for (const personId of classification.associated_people ?? 
[]) {\n if (peopleInText.includes(personId)) {\n const person = contextInstance.getPerson(personId);\n signals.push({\n type: 'associated_person',\n value: person?.name ?? personId,\n weight: 0.6,\n });\n }\n }\n \n // 3. Check associated companies\n const companiesInText = routingContext.detectedCompanies ?? \n detectCompaniesFromContext(normalizedText, contextInstance);\n \n for (const companyId of classification.associated_companies ?? []) {\n if (companiesInText.includes(companyId)) {\n const company = contextInstance.getCompany(companyId);\n signals.push({\n type: 'associated_company',\n value: company?.name ?? companyId,\n weight: 0.5,\n });\n }\n }\n \n // 4. Check topics\n for (const topic of classification.topics ?? []) {\n if (normalizedText.includes(topic.toLowerCase())) {\n signals.push({\n type: 'topic',\n value: topic,\n weight: 0.3,\n });\n }\n }\n \n // 5. Context type (if we can infer work vs personal)\n // This is a weaker signal but helps with disambiguation\n const inferredContextType = inferContextType(normalizedText);\n if (inferredContextType === classification.context_type) {\n signals.push({\n type: 'context_type',\n value: classification.context_type,\n weight: 0.2,\n });\n }\n \n // Only include if we have at least one signal\n if (signals.length > 0) {\n const confidence = calculateConfidence(signals);\n results.push({\n projectId: route.projectId,\n confidence,\n signals,\n reasoning: buildReasoning(signals),\n });\n }\n }\n \n // Sort by confidence descending\n return results.sort((a, b) => b.confidence - a.confidence);\n };\n \n const calculateConfidence = (signals: ClassificationSignal[]): number => {\n if (signals.length === 0) return 0;\n \n // Weighted average with diminishing returns for multiple signals\n let totalWeight = 0;\n let weightedSum = 0;\n \n for (let i = 0; i < signals.length; i++) {\n const signal = signals[i];\n // Later signals contribute less (diminishing returns)\n const positionFactor = 1 / (1 + i * 0.3);\n 
const effectiveWeight = signal.weight * positionFactor;\n \n weightedSum += effectiveWeight;\n totalWeight += positionFactor;\n }\n \n // Normalize and cap at 0.99\n return Math.min(weightedSum / Math.max(totalWeight, 1), 0.99);\n };\n \n const buildReasoning = (signals: ClassificationSignal[]): string => {\n const parts = signals.map(s => {\n switch (s.type) {\n case 'explicit_phrase': return `explicit phrase: \"${s.value}\"`;\n case 'associated_person': return `mentioned ${s.value} (associated)`;\n case 'associated_company': return `mentioned ${s.value} (associated company)`;\n case 'topic': return `topic: ${s.value}`;\n case 'context_type': return `context: ${s.value}`;\n }\n });\n return parts.join(', ');\n };\n \n return { classify, calculateConfidence };\n};\n\n// Helper functions\nfunction detectPeopleFromContext(\n text: string, \n context: Context.ContextInstance\n): string[] {\n const found: string[] = [];\n \n for (const person of context.getAllPeople()) {\n const nameNormalized = person.name.toLowerCase();\n if (text.includes(nameNormalized)) {\n found.push(person.id);\n continue;\n }\n \n // Check phonetic variants (sounds_like)\n for (const variant of person.sounds_like ?? []) {\n if (text.includes(variant.toLowerCase())) {\n found.push(person.id);\n break;\n }\n }\n }\n \n return found;\n}\n\nfunction detectCompaniesFromContext(\n text: string, \n context: Context.ContextInstance\n): string[] {\n const found: string[] = [];\n \n for (const company of context.getAllCompanies()) {\n const nameNormalized = company.name.toLowerCase();\n if (text.includes(nameNormalized)) {\n found.push(company.id);\n continue;\n }\n \n // Check full name\n if (company.fullName && text.includes(company.fullName.toLowerCase())) {\n found.push(company.id);\n continue;\n }\n \n // Check phonetic variants (sounds_like)\n for (const variant of company.sounds_like ?? 
[]) {\n if (text.includes(variant.toLowerCase())) {\n found.push(company.id);\n break;\n }\n }\n }\n \n return found;\n}\n\nfunction inferContextType(text: string): 'work' | 'personal' | 'mixed' {\n const workIndicators = ['meeting', 'project', 'deadline', 'team', 'client', 'report'];\n const personalIndicators = ['family', 'weekend', 'vacation', 'hobby', 'friend'];\n \n let workScore = 0;\n let personalScore = 0;\n \n for (const word of workIndicators) {\n if (text.includes(word)) workScore++;\n }\n \n for (const word of personalIndicators) {\n if (text.includes(word)) personalScore++;\n }\n \n if (workScore > personalScore + 1) return 'work';\n if (personalScore > workScore + 1) return 'personal';\n return 'mixed';\n}\n\n"],"names":["create","contextInstance","classify","routingContext","routes","results","normalizedText","transcriptText","toLowerCase","route","classification","active","signals","phrase","explicit_phrases","includes","push","type","value","weight","peopleInText","detectedPeople","detectPeopleFromContext","personId","associated_people","person","getPerson","name","companiesInText","detectedCompanies","detectCompaniesFromContext","companyId","associated_companies","company","getCompany","topic","topics","inferredContextType","inferContextType","context_type","length","confidence","calculateConfidence","projectId","reasoning","buildReasoning","sort","a","b","totalWeight","weightedSum","i","signal","positionFactor","effectiveWeight","Math","min","max","parts","map","s","join","text","context","found","getAllPeople","nameNormalized","id","variant","sounds_like","getAllCompanies","fullName","workIndicators","personalIndicators","workScore","personalScore","word"],"mappings":"AAAA;;;;;;;;;IAwBO,MAAMA,MAAAA,GAAS,CAACC,eAAAA,GAAAA;IAEnB,MAAMC,QAAAA,GAAW,CACbC,cAAAA,EACAC,MAAAA,GAAAA;AAEA,QAAA,MAAMC,UAAkC,EAAE;AAC1C,QAAA,MAAMC,cAAAA,GAAiBH,cAAAA,CAAeI,cAAc,CAACC,WAAW,EAAA;QAEhE,KAAK,MAAMC,SAASL,MAAAA,CAAQ;gBAOHM,gCAAAA,EAWAP,8BAAAA,EAGEO,iCAAAA,EAYCP,iCAAAA
,EAGAO,oCAAAA,EAYJA,sBAAAA;YA/CpB,IAAID,KAAAA,CAAME,MAAM,KAAK,KAAA,EAAO;AAE5B,YAAA,MAAMC,UAAkC,EAAE;YAC1C,MAAMF,cAAAA,GAAiBD,MAAMC,cAAc;;YAG3C,KAAK,MAAMG,WAAUH,gCAAAA,GAAAA,cAAAA,CAAeI,gBAAgB,MAAA,IAAA,IAA/BJ,gCAAAA,KAAAA,MAAAA,GAAAA,gCAAAA,GAAmC,EAAE,CAAE;AACxD,gBAAA,IAAIJ,cAAAA,CAAeS,QAAQ,CAACF,MAAAA,CAAOL,WAAW,EAAA,CAAA,EAAK;AAC/CI,oBAAAA,OAAAA,CAAQI,IAAI,CAAC;wBACTC,IAAAA,EAAM,iBAAA;wBACNC,KAAAA,EAAOL,MAAAA;wBACPM,MAAAA,EAAQ;AACZ,qBAAA,CAAA;AACJ,gBAAA;AACJ,YAAA;;YAGA,MAAMC,YAAAA,GAAAA,CAAejB,iCAAAA,cAAAA,CAAekB,cAAc,cAA7BlB,8BAAAA,KAAAA,MAAAA,GAAAA,8BAAAA,GACjBmB,wBAAwBhB,cAAAA,EAAgBL,eAAAA,CAAAA;YAE5C,KAAK,MAAMsB,aAAYb,iCAAAA,GAAAA,cAAAA,CAAec,iBAAiB,MAAA,IAAA,IAAhCd,iCAAAA,KAAAA,MAAAA,GAAAA,iCAAAA,GAAoC,EAAE,CAAE;gBAC3D,IAAIU,YAAAA,CAAaL,QAAQ,CAACQ,QAAAA,CAAAA,EAAW;;oBACjC,MAAME,MAAAA,GAASxB,eAAAA,CAAgByB,SAAS,CAACH,QAAAA,CAAAA;AACzCX,oBAAAA,OAAAA,CAAQI,IAAI,CAAC;wBACTC,IAAAA,EAAM,mBAAA;AACNC,wBAAAA,KAAK,EAAA,CAAA,IAAA,GAAEO,MAAAA,KAAAA,IAAAA,IAAAA,MAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,MAAAA,CAAQE,IAAI,MAAA,IAAA,IAAA,IAAA,KAAA,MAAA,GAAA,IAAA,GAAIJ,QAAAA;wBACvBJ,MAAAA,EAAQ;AACZ,qBAAA,CAAA;AACJ,gBAAA;AACJ,YAAA;;YAGA,MAAMS,eAAAA,GAAAA,CAAkBzB,oCAAAA,cAAAA,CAAe0B,iBAAiB,cAAhC1B,iCAAAA,KAAAA,MAAAA,GAAAA,iCAAAA,GACpB2B,2BAA2BxB,cAAAA,EAAgBL,eAAAA,CAAAA;YAE/C,KAAK,MAAM8B,cAAarB,oCAAAA,GAAAA,cAAAA,CAAesB,oBAAoB,MAAA,IAAA,IAAnCtB,oCAAAA,KAAAA,MAAAA,GAAAA,oCAAAA,GAAuC,EAAE,CAAE;gBAC/D,IAAIkB,eAAAA,CAAgBb,QAAQ,CAACgB,SAAAA,CAAAA,EAAY;;oBACrC,MAAME,OAAAA,GAAUhC,eAAAA,CAAgBiC,UAAU,CAACH,SAAAA,CAAAA;AAC3CnB,oBAAAA,OAAAA,CAAQI,IAAI,CAAC;wBACTC,IAAAA,EAAM,oBAAA;AACNC,wBAAAA,KAAK,EAAA,CAAA,KAAA,GAAEe,OAAAA,KAAAA,IAAAA,IAAAA,OAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,OAAAA,CAASN,IAAI,MAAA,IAAA,IAAA,KAAA,KAAA,MAAA,GAAA,KAAA,GAAII,SAAAA;wBACxBZ,MAAAA,EAAQ;AACZ,qBAAA,CAAA;AACJ,gBAAA;AACJ,YAAA;;YAGA,KAAK,MAAMgB,UAASzB,sBAAAA,GAAAA,cAAAA,CAAe0B,MAAM,MAAA,IAAA,IAArB1B,sBAAAA,KAAAA,MAAAA,GAAAA,sBAAAA,GAAyB,EAAE,CAAE;AAC7C,gBAAA,IAAIJ,cAAAA,CAAeS,QAAQ,CAACoB,KAAAA,CAAM3B,WAAW,EAAA,CAAA,EAAK;AAC
9CI,oBAAAA,OAAAA,CAAQI,IAAI,CAAC;wBACTC,IAAAA,EAAM,OAAA;wBACNC,KAAAA,EAAOiB,KAAAA;wBACPhB,MAAAA,EAAQ;AACZ,qBAAA,CAAA;AACJ,gBAAA;AACJ,YAAA;;;AAIA,YAAA,MAAMkB,sBAAsBC,gBAAAA,CAAiBhC,cAAAA,CAAAA;YAC7C,IAAI+B,mBAAAA,KAAwB3B,cAAAA,CAAe6B,YAAY,EAAE;AACrD3B,gBAAAA,OAAAA,CAAQI,IAAI,CAAC;oBACTC,IAAAA,EAAM,cAAA;AACNC,oBAAAA,KAAAA,EAAOR,eAAe6B,YAAY;oBAClCpB,MAAAA,EAAQ;AACZ,iBAAA,CAAA;AACJ,YAAA;;YAGA,IAAIP,OAAAA,CAAQ4B,MAAM,GAAG,CAAA,EAAG;AACpB,gBAAA,MAAMC,aAAaC,mBAAAA,CAAoB9B,OAAAA,CAAAA;AACvCP,gBAAAA,OAAAA,CAAQW,IAAI,CAAC;AACT2B,oBAAAA,SAAAA,EAAWlC,MAAMkC,SAAS;AAC1BF,oBAAAA,UAAAA;AACA7B,oBAAAA,OAAAA;AACAgC,oBAAAA,SAAAA,EAAWC,cAAAA,CAAejC,OAAAA;AAC9B,iBAAA,CAAA;AACJ,YAAA;AACJ,QAAA;;QAGA,OAAOP,OAAAA,CAAQyC,IAAI,CAAC,CAACC,CAAAA,EAAGC,IAAMA,CAAAA,CAAEP,UAAU,GAAGM,CAAAA,CAAEN,UAAU,CAAA;AAC7D,IAAA,CAAA;AAEA,IAAA,MAAMC,sBAAsB,CAAC9B,OAAAA,GAAAA;AACzB,QAAA,IAAIA,OAAAA,CAAQ4B,MAAM,KAAK,CAAA,EAAG,OAAO,CAAA;;AAGjC,QAAA,IAAIS,WAAAA,GAAc,CAAA;AAClB,QAAA,IAAIC,WAAAA,GAAc,CAAA;AAElB,QAAA,IAAK,IAAIC,CAAAA,GAAI,CAAA,EAAGA,IAAIvC,OAAAA,CAAQ4B,MAAM,EAAEW,CAAAA,EAAAA,CAAK;YACrC,MAAMC,MAAAA,GAASxC,OAAO,CAACuC,CAAAA,CAAE;;AAEzB,YAAA,MAAME,cAAAA,GAAiB,CAAA,IAAK,CAAA,GAAIF,IAAI,GAAE,CAAA;YACtC,MAAMG,eAAAA,GAAkBF,MAAAA,CAAOjC,MAAM,GAAGkC,cAAAA;YAExCH,WAAAA,IAAeI,eAAAA;YACfL,WAAAA,IAAeI,cAAAA;AACnB,QAAA;;QAGA,OAAOE,IAAAA,CAAKC,GAAG,CAACN,WAAAA,GAAcK,KAAKE,GAAG,CAACR,aAAa,CAAA,CAAA,EAAI,IAAA,CAAA;AAC5D,IAAA,CAAA;AAEA,IAAA,MAAMJ,iBAAiB,CAACjC,OAAAA,GAAAA;AACpB,QAAA,MAAM8C,KAAAA,GAAQ9C,OAAAA,CAAQ+C,GAAG,CAACC,CAAAA,CAAAA,GAAAA;AACtB,YAAA,OAAQA,EAAE3C,IAAI;gBACV,KAAK,iBAAA;AAAmB,oBAAA,OAAO,CAAC,kBAAkB,EAAE2C,EAAE1C,KAAK,CAAC,CAAC,CAAC;gBAC9D,KAAK,mBAAA;AAAqB,oBAAA,OAAO,CAAC,UAAU,EAAE0C,EAAE1C,KAAK,CAAC,aAAa,CAAC;gBACpE,KAAK,oBAAA;AAAsB,oBAAA,OAAO,CAAC,UAAU,EAAE0C,EAAE1C,KAAK,CAAC,qBAAqB,CAAC;gBAC7E,KAAK,OAAA;AAAS,oBAAA,OAAO,CAAC,OAAO,EAAE0C,CAAAA,CAAE1C,KAAK,CAAA,CAAE;gBACxC,KAAK,cAAA;AAAgB,oBAAA,OAAO,CAAC,SAAS,EAAE0C,CAAAA,CAAE1C,KAAK,CAAA,CAAE;AACrD;AACJ,QAAA,CAAA,CAAA;QACA,OAAOwC,KAAAA,CAAMG,IAAI,CAAC
,IAAA,CAAA;AACtB,IAAA,CAAA;IAEA,OAAO;AAAE3D,QAAAA,QAAAA;AAAUwC,QAAAA;AAAoB,KAAA;AAC3C;AAEA;AACA,SAASpB,uBAAAA,CACLwC,IAAY,EACZC,OAAgC,EAAA;AAEhC,IAAA,MAAMC,QAAkB,EAAE;AAE1B,IAAA,KAAK,MAAMvC,MAAAA,IAAUsC,OAAAA,CAAQE,YAAY,EAAA,CAAI;AAQnBxC,QAAAA,IAAAA,mBAAAA;AAPtB,QAAA,MAAMyC,cAAAA,GAAiBzC,MAAAA,CAAOE,IAAI,CAACnB,WAAW,EAAA;QAC9C,IAAIsD,IAAAA,CAAK/C,QAAQ,CAACmD,cAAAA,CAAAA,EAAiB;YAC/BF,KAAAA,CAAMhD,IAAI,CAACS,MAAAA,CAAO0C,EAAE,CAAA;AACpB,YAAA;AACJ,QAAA;;QAGA,KAAK,MAAMC,YAAW3C,mBAAAA,GAAAA,MAAAA,CAAO4C,WAAW,MAAA,IAAA,IAAlB5C,mBAAAA,KAAAA,MAAAA,GAAAA,mBAAAA,GAAsB,EAAE,CAAE;AAC5C,YAAA,IAAIqC,IAAAA,CAAK/C,QAAQ,CAACqD,OAAAA,CAAQ5D,WAAW,EAAA,CAAA,EAAK;gBACtCwD,KAAAA,CAAMhD,IAAI,CAACS,MAAAA,CAAO0C,EAAE,CAAA;AACpB,gBAAA;AACJ,YAAA;AACJ,QAAA;AACJ,IAAA;IAEA,OAAOH,KAAAA;AACX;AAEA,SAASlC,0BAAAA,CACLgC,IAAY,EACZC,OAAgC,EAAA;AAEhC,IAAA,MAAMC,QAAkB,EAAE;AAE1B,IAAA,KAAK,MAAM/B,OAAAA,IAAW8B,OAAAA,CAAQO,eAAe,EAAA,CAAI;AAcvBrC,QAAAA,IAAAA,oBAAAA;AAbtB,QAAA,MAAMiC,cAAAA,GAAiBjC,OAAAA,CAAQN,IAAI,CAACnB,WAAW,EAAA;QAC/C,IAAIsD,IAAAA,CAAK/C,QAAQ,CAACmD,cAAAA,CAAAA,EAAiB;YAC/BF,KAAAA,CAAMhD,IAAI,CAACiB,OAAAA,CAAQkC,EAAE,CAAA;AACrB,YAAA;AACJ,QAAA;;QAGA,IAAIlC,OAAAA,CAAQsC,QAAQ,IAAIT,IAAAA,CAAK/C,QAAQ,CAACkB,OAAAA,CAAQsC,QAAQ,CAAC/D,WAAW,EAAA,CAAA,EAAK;YACnEwD,KAAAA,CAAMhD,IAAI,CAACiB,OAAAA,CAAQkC,EAAE,CAAA;AACrB,YAAA;AACJ,QAAA;;QAGA,KAAK,MAAMC,YAAWnC,oBAAAA,GAAAA,OAAAA,CAAQoC,WAAW,MAAA,IAAA,IAAnBpC,oBAAAA,KAAAA,MAAAA,GAAAA,oBAAAA,GAAuB,EAAE,CAAE;AAC7C,YAAA,IAAI6B,IAAAA,CAAK/C,QAAQ,CAACqD,OAAAA,CAAQ5D,WAAW,EAAA,CAAA,EAAK;gBACtCwD,KAAAA,CAAMhD,IAAI,CAACiB,OAAAA,CAAQkC,EAAE,CAAA;AACrB,gBAAA;AACJ,YAAA;AACJ,QAAA;AACJ,IAAA;IAEA,OAAOH,KAAAA;AACX;AAEA,SAAS1B,iBAAiBwB,IAAY,EAAA;AAClC,IAAA,MAAMU,cAAAA,GAAiB;AAAC,QAAA,SAAA;AAAW,QAAA,SAAA;AAAW,QAAA,UAAA;AAAY,QAAA,MAAA;AAAQ,QAAA,QAAA;AAAU,QAAA;AAAS,KAAA;AACrF,IAAA,MAAMC,kBAAAA,GAAqB;AAAC,QAAA,QAAA;AAAU,QAAA,SAAA;AAAW,QAAA,UAAA;AAAY,QAAA,OAAA;AAAS,QAAA;AAAS,KAAA;AAE/E,IAAA,IAAIC,SAAAA,GAAY,CAAA;AAChB,IAAA,IAAIC,aAAAA,GAAgB,CAAA;IAEpB,KAAK,MAAMC,QAAQJ,cAAAA,CA
AgB;QAC/B,IAAIV,IAAAA,CAAK/C,QAAQ,CAAC6D,IAAAA,CAAAA,EAAOF,SAAAA,EAAAA;AAC7B,IAAA;IAEA,KAAK,MAAME,QAAQH,kBAAAA,CAAoB;QACnC,IAAIX,IAAAA,CAAK/C,QAAQ,CAAC6D,IAAAA,CAAAA,EAAOD,aAAAA,EAAAA;AAC7B,IAAA;IAEA,IAAID,SAAAA,GAAYC,aAAAA,GAAgB,CAAA,EAAG,OAAO,MAAA;IAC1C,IAAIA,aAAAA,GAAgBD,SAAAA,GAAY,CAAA,EAAG,OAAO,UAAA;IAC1C,OAAO,OAAA;AACX;;;;"}
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import { create as create$1 } from './router.js';
|
|
2
|
+
import { create as create$2 } from './classifier.js';
|
|
3
|
+
|
|
4
|
+
const create = (config, context)=>{
|
|
5
|
+
const classifier = create$2(context);
|
|
6
|
+
const router = create$1(config, classifier);
|
|
7
|
+
// Mutable config for self-update feature
|
|
8
|
+
const currentConfig = {
|
|
9
|
+
...config
|
|
10
|
+
};
|
|
11
|
+
return {
|
|
12
|
+
route: (ctx)=>router.route(ctx),
|
|
13
|
+
buildOutputPath: (decision, ctx)=>router.buildOutputPath(decision, ctx),
|
|
14
|
+
addProject: (project)=>{
|
|
15
|
+
currentConfig.projects.push(project);
|
|
16
|
+
},
|
|
17
|
+
updateDefaultRoute: (destination)=>{
|
|
18
|
+
currentConfig.default = destination;
|
|
19
|
+
},
|
|
20
|
+
getConfig: ()=>({
|
|
21
|
+
...currentConfig
|
|
22
|
+
})
|
|
23
|
+
};
|
|
24
|
+
};
|
|
25
|
+
|
|
26
|
+
export { create };
|
|
27
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sources":["../../src/routing/index.ts"],"sourcesContent":["/**\n * Routing System\n * \n * Main entry point for the routing system. Provides a factory function\n * to create routing instances that can classify transcripts and determine\n * output destinations using Dreadcabinet patterns.\n * \n * Design Note: This module is designed to be self-contained and may be\n * extracted for use in other tools (kronologi, observasjon) in the future.\n */\n\nimport { RoutingConfig, RouteDecision, RoutingContext, RouteDestination, ProjectRoute } from './types';\nimport * as Router from './router';\nimport * as Classifier from './classifier';\nimport * as Context from '../context';\n\nexport interface RoutingInstance {\n route(context: RoutingContext): RouteDecision;\n buildOutputPath(decision: RouteDecision, context: RoutingContext): string;\n addProject(project: ProjectRoute): void;\n updateDefaultRoute(destination: RouteDestination): void;\n getConfig(): RoutingConfig;\n}\n\nexport const create = (\n config: RoutingConfig,\n context: Context.ContextInstance\n): RoutingInstance => {\n const classifier = Classifier.create(context);\n const router = Router.create(config, classifier);\n \n // Mutable config for self-update feature\n const currentConfig = { ...config };\n \n return {\n route: (ctx) => router.route(ctx),\n buildOutputPath: (decision, ctx) => router.buildOutputPath(decision, ctx),\n \n addProject: (project) => {\n currentConfig.projects.push(project);\n },\n \n updateDefaultRoute: (destination) => {\n currentConfig.default = destination;\n },\n \n getConfig: () => ({ ...currentConfig }),\n };\n};\n\n// Re-export types\nexport * from 
'./types';\n\n"],"names":["create","config","context","classifier","Classifier","router","Router","currentConfig","route","ctx","buildOutputPath","decision","addProject","project","projects","push","updateDefaultRoute","destination","default","getConfig"],"mappings":";;;AAwBO,MAAMA,MAAAA,GAAS,CAClBC,MAAAA,EACAC,OAAAA,GAAAA;IAEA,MAAMC,UAAAA,GAAaC,QAAiB,CAACF,OAAAA,CAAAA;AACrC,IAAA,MAAMG,MAAAA,GAASC,QAAa,CAACL,MAAAA,EAAQE,UAAAA,CAAAA;;AAGrC,IAAA,MAAMI,aAAAA,GAAgB;AAAE,QAAA,GAAGN;AAAO,KAAA;IAElC,OAAO;AACHO,QAAAA,KAAAA,EAAO,CAACC,GAAAA,GAAQJ,MAAAA,CAAOG,KAAK,CAACC,GAAAA,CAAAA;AAC7BC,QAAAA,eAAAA,EAAiB,CAACC,QAAAA,EAAUF,GAAAA,GAAQJ,MAAAA,CAAOK,eAAe,CAACC,QAAAA,EAAUF,GAAAA,CAAAA;AAErEG,QAAAA,UAAAA,EAAY,CAACC,OAAAA,GAAAA;YACTN,aAAAA,CAAcO,QAAQ,CAACC,IAAI,CAACF,OAAAA,CAAAA;AAChC,QAAA,CAAA;AAEAG,QAAAA,kBAAAA,EAAoB,CAACC,WAAAA,GAAAA;AACjBV,YAAAA,aAAAA,CAAcW,OAAO,GAAGD,WAAAA;AAC5B,QAAA,CAAA;AAEAE,QAAAA,SAAAA,EAAW,KAAO;AAAE,gBAAA,GAAGZ;aAAc;AACzC,KAAA;AACJ;;;;"}
|
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
import * as path from 'path';
|
|
2
|
+
import * as os from 'os';
|
|
3
|
+
|
|
4
|
+
const create = (config, classifier)=>{
|
|
5
|
+
const route = (context)=>{
|
|
6
|
+
const results = classifier.classify(context, config.projects);
|
|
7
|
+
if (results.length === 0) {
|
|
8
|
+
return {
|
|
9
|
+
projectId: null,
|
|
10
|
+
destination: config.default,
|
|
11
|
+
confidence: 1.0,
|
|
12
|
+
signals: [],
|
|
13
|
+
reasoning: 'No project matches found, using default routing'
|
|
14
|
+
};
|
|
15
|
+
}
|
|
16
|
+
const bestMatch = results[0];
|
|
17
|
+
const matchedProject = config.projects.find((p)=>p.projectId === bestMatch.projectId);
|
|
18
|
+
// Handle conflict resolution if multiple high-confidence matches
|
|
19
|
+
const highConfidenceMatches = results.filter((r)=>r.confidence > 0.5);
|
|
20
|
+
if (highConfidenceMatches.length > 1 && config.conflict_resolution !== 'primary') {
|
|
21
|
+
// Return best with alternates noted
|
|
22
|
+
return {
|
|
23
|
+
projectId: bestMatch.projectId,
|
|
24
|
+
destination: matchedProject.destination,
|
|
25
|
+
confidence: bestMatch.confidence,
|
|
26
|
+
signals: bestMatch.signals,
|
|
27
|
+
reasoning: bestMatch.reasoning,
|
|
28
|
+
auto_tags: matchedProject.auto_tags,
|
|
29
|
+
alternateMatches: highConfidenceMatches.slice(1)
|
|
30
|
+
};
|
|
31
|
+
}
|
|
32
|
+
return {
|
|
33
|
+
projectId: bestMatch.projectId,
|
|
34
|
+
destination: matchedProject.destination,
|
|
35
|
+
confidence: bestMatch.confidence,
|
|
36
|
+
signals: bestMatch.signals,
|
|
37
|
+
reasoning: bestMatch.reasoning,
|
|
38
|
+
auto_tags: matchedProject.auto_tags
|
|
39
|
+
};
|
|
40
|
+
};
|
|
41
|
+
const buildOutputPath = (decision, context)=>{
|
|
42
|
+
const { destination } = decision;
|
|
43
|
+
// Expand ~ to home directory
|
|
44
|
+
const basePath = expandPath(destination.path);
|
|
45
|
+
// Build directory structure using Dreadcabinet patterns
|
|
46
|
+
const directoryPath = buildDirectoryPath(basePath, destination.structure, context.audioDate);
|
|
47
|
+
// Build filename using Dreadcabinet patterns
|
|
48
|
+
// Pass structure so filename doesn't repeat info already in path
|
|
49
|
+
const filename = buildFilename(destination.filename_options, context, destination.structure);
|
|
50
|
+
return path.join(directoryPath, filename + '.md');
|
|
51
|
+
};
|
|
52
|
+
return {
|
|
53
|
+
route,
|
|
54
|
+
buildOutputPath
|
|
55
|
+
};
|
|
56
|
+
};
|
|
57
|
+
// Dreadcabinet-style directory building
/**
 * Builds the output directory from a base path, a structure setting
 * ('none' | 'year' | 'month' | 'day'), and the audio date.
 * Year/month/day components are unpadded decimal strings (e.g. 2024/1/5),
 * matching the existing on-disk layout.
 * Fix: an unrecognized structure value now falls back to the base path
 * instead of returning undefined, which corrupted the later path.join.
 */
function buildDirectoryPath(basePath, structure, date) {
    const year = date.getFullYear().toString();
    const month = (date.getMonth() + 1).toString();
    const day = date.getDate().toString();
    switch(structure){
        case 'year':
            return path.join(basePath, year);
        case 'month':
            return path.join(basePath, year, month);
        case 'day':
            return path.join(basePath, year, month, day);
        case 'none':
        default:
            // Unknown structure: safest behavior is no date subdirectories.
            return basePath;
    }
}
|
|
73
|
+
// Dreadcabinet-style filename building
// The date portion is adjusted based on what's already in the directory path
/**
 * Assembles the output filename (without extension) from the configured
 * options ('date' | 'time' | 'subject'), skipping any date components that
 * the directory structure already encodes.
 */
function buildFilename(options, context, structure) {
    const when = context.audioDate;
    const two = (n) => n.toString().padStart(2, '0');
    const dd = two(when.getDate());
    const mm = two(when.getMonth() + 1);
    const yy = when.getFullYear().toString().slice(2);
    // Date text per structure; empty means the path already carries the date.
    const dateForStructure = {
        day: '',
        month: dd,
        year: `${mm}-${dd}`,
        none: `${yy}${mm}${dd}`
    };
    const segments = [];
    for (const option of options){
        if (option === 'date') {
            const text = dateForStructure[structure];
            if (text) {
                segments.push(text);
            }
        } else if (option === 'time') {
            segments.push(`${two(when.getHours())}${two(when.getMinutes())}`);
        } else if (option === 'subject') {
            const subject = extractSubject(context.transcriptText, context.sourceFile);
            if (subject) {
                segments.push(subject);
            }
        }
    }
    // Join and collapse any accidental double dashes.
    return segments.join('-').replace(/--+/g, '-');
}
|
|
126
|
+
/**
 * Derives a short subject slug for the filename.
 * First choice: the transcript's opening sentence, stripped of filler
 * prefixes ("note about", "regarding", ...), when 4-49 chars long.
 * Otherwise: a sanitized, lowercased version of the source file's basename.
 */
function extractSubject(text, sourceFile) {
    const opening = text.split(/[.!?]/)[0]?.trim() ?? '';
    // Remove common lead-in phrases before judging the candidate's length.
    const candidate = opening
        .replace(/^(this is a note about|note about|regarding|re:|meeting notes?:?)/i, '')
        .trim();
    if (candidate.length > 3 && candidate.length < 50) {
        return slugify(candidate);
    }
    // Fall back to the source filename, sanitized the same way.
    const stem = path.basename(sourceFile, path.extname(sourceFile));
    return stem.replace(/[^a-zA-Z0-9-]/g, '-').toLowerCase();
}
|
|
139
|
+
/**
 * Normalizes free text into a filesystem-friendly slug:
 * lowercase, dash-separated, no leading/trailing dashes, max 40 chars.
 */
function slugify(text) {
    const lowered = text.toLowerCase();
    const dashed = lowered.replace(/[^a-z0-9]+/g, '-');
    const collapsed = dashed.replace(/--+/g, '-');
    const trimmed = collapsed.replace(/^-|-$/g, '');
    return trimmed.slice(0, 40);
}
|
|
145
|
+
/**
 * Expands a leading "~" to the current user's home directory.
 * Fix: only "~" by itself or a "~/" prefix is expanded; a "~username"
 * path is left untouched (the old startsWith('~') check wrongly glued
 * "~otheruser/x" onto THIS user's home directory).
 */
function expandPath(p) {
    if (p === '~' || p.startsWith('~/')) {
        return path.join(os.homedir(), p.slice(1));
    }
    return p;
}
|
|
151
|
+
|
|
152
|
+
export { create };
|
|
153
|
+
//# sourceMappingURL=router.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"router.js","sources":["../../src/routing/router.ts"],"sourcesContent":["/**\n * Router\n * \n * Handles routing decisions and path building using Dreadcabinet patterns.\n * Takes classification results and builds output paths with appropriate\n * directory structure and filenames.\n * \n * Design Note: This module is designed to be self-contained and may be\n * extracted for use in other tools (kronologi, observasjon) in the future.\n */\n\nimport * as path from 'path';\nimport * as os from 'os';\nimport { \n RoutingContext, \n RouteDecision,\n RoutingConfig,\n FilesystemStructure\n} from './types';\nimport * as Classifier from './classifier';\n\nexport interface RouterInstance {\n route(context: RoutingContext): RouteDecision;\n buildOutputPath(decision: RouteDecision, context: RoutingContext): string;\n}\n\nexport const create = (\n config: RoutingConfig,\n classifier: Classifier.ClassifierInstance\n): RouterInstance => {\n \n const route = (context: RoutingContext): RouteDecision => {\n const results = classifier.classify(context, config.projects);\n \n if (results.length === 0) {\n return {\n projectId: null,\n destination: config.default,\n confidence: 1.0,\n signals: [],\n reasoning: 'No project matches found, using default routing',\n };\n }\n \n const bestMatch = results[0];\n const matchedProject = config.projects.find(p => p.projectId === bestMatch.projectId)!;\n \n // Handle conflict resolution if multiple high-confidence matches\n const highConfidenceMatches = results.filter(r => r.confidence > 0.5);\n \n if (highConfidenceMatches.length > 1 && config.conflict_resolution !== 'primary') {\n // Return best with alternates noted\n return {\n projectId: bestMatch.projectId,\n destination: matchedProject.destination,\n confidence: bestMatch.confidence,\n signals: bestMatch.signals,\n reasoning: bestMatch.reasoning,\n auto_tags: matchedProject.auto_tags,\n alternateMatches: highConfidenceMatches.slice(1),\n };\n }\n \n return {\n 
projectId: bestMatch.projectId,\n destination: matchedProject.destination,\n confidence: bestMatch.confidence,\n signals: bestMatch.signals,\n reasoning: bestMatch.reasoning,\n auto_tags: matchedProject.auto_tags,\n };\n };\n \n const buildOutputPath = (decision: RouteDecision, context: RoutingContext): string => {\n const { destination } = decision;\n \n // Expand ~ to home directory\n const basePath = expandPath(destination.path);\n \n // Build directory structure using Dreadcabinet patterns\n const directoryPath = buildDirectoryPath(basePath, destination.structure, context.audioDate);\n \n // Build filename using Dreadcabinet patterns\n // Pass structure so filename doesn't repeat info already in path\n const filename = buildFilename(destination.filename_options, context, destination.structure);\n \n return path.join(directoryPath, filename + '.md');\n };\n \n return { route, buildOutputPath };\n};\n\n// Dreadcabinet-style directory building\nfunction buildDirectoryPath(\n basePath: string, \n structure: FilesystemStructure, \n date: Date\n): string {\n const year = date.getFullYear().toString();\n const month = (date.getMonth() + 1).toString();\n const day = date.getDate().toString();\n \n switch (structure) {\n case 'none':\n return basePath;\n case 'year':\n return path.join(basePath, year);\n case 'month':\n return path.join(basePath, year, month);\n case 'day':\n return path.join(basePath, year, month, day);\n }\n}\n\n// Dreadcabinet-style filename building\n// The date portion is adjusted based on what's already in the directory path\nfunction buildFilename(\n options: Array<'date' | 'time' | 'subject'>,\n context: RoutingContext,\n structure: FilesystemStructure\n): string {\n const parts: string[] = [];\n const date = context.audioDate;\n const pad = (n: number) => n.toString().padStart(2, '0');\n \n for (const option of options) {\n switch (option) {\n case 'date': {\n // Adjust date format based on directory structure\n // Don't repeat info already in 
the path\n const day = pad(date.getDate());\n const month = pad(date.getMonth() + 1);\n const year = date.getFullYear().toString().slice(2);\n \n switch (structure) {\n case 'day':\n // Path has year/month/day - no date needed in filename\n break;\n case 'month':\n // Path has year/month - only day in filename\n parts.push(day);\n break;\n case 'year':\n // Path has year - month+day in filename\n parts.push(`${month}-${day}`);\n break;\n case 'none':\n // No date in path - full date in filename (YYMMDD)\n parts.push(`${year}${month}${day}`);\n break;\n }\n break;\n }\n case 'time': {\n const hours = pad(date.getHours());\n const minutes = pad(date.getMinutes());\n parts.push(`${hours}${minutes}`);\n break;\n }\n case 'subject': {\n const subject = extractSubject(context.transcriptText, context.sourceFile);\n if (subject) {\n parts.push(subject);\n }\n break;\n }\n }\n }\n \n // Join and clean up any double dashes\n return parts.join('-').replace(/--+/g, '-');\n}\n\nfunction extractSubject(text: string, sourceFile: string): string {\n // Try to extract from first sentence\n const firstSentence = text.split(/[.!?]/)[0]?.trim() ?? 
'';\n \n // Remove common prefixes\n const cleaned = firstSentence\n .replace(/^(this is a note about|note about|regarding|re:|meeting notes?:?)/i, '')\n .trim();\n \n if (cleaned.length > 3 && cleaned.length < 50) {\n return slugify(cleaned);\n }\n \n // Fall back to source filename\n return path.basename(sourceFile, path.extname(sourceFile))\n .replace(/[^a-zA-Z0-9-]/g, '-')\n .toLowerCase();\n}\n\nfunction slugify(text: string): string {\n return text\n .toLowerCase()\n .replace(/[^a-z0-9]+/g, '-') // Replace non-alphanumeric with dash\n .replace(/--+/g, '-') // Collapse multiple dashes\n .replace(/^-|-$/g, '') // Remove leading/trailing dashes\n .slice(0, 40);\n}\n\nfunction expandPath(p: string): string {\n if (p.startsWith('~')) {\n return path.join(os.homedir(), p.slice(1));\n }\n return p;\n}\n\n"],"names":["create","config","classifier","route","context","results","classify","projects","length","projectId","destination","default","confidence","signals","reasoning","bestMatch","matchedProject","find","p","highConfidenceMatches","filter","r","conflict_resolution","auto_tags","alternateMatches","slice","buildOutputPath","decision","basePath","expandPath","path","directoryPath","buildDirectoryPath","structure","audioDate","filename","buildFilename","filename_options","join","date","year","getFullYear","toString","month","getMonth","day","getDate","options","parts","pad","n","padStart","option","push","hours","getHours","minutes","getMinutes","subject","extractSubject","transcriptText","sourceFile","replace","text","firstSentence","split","trim","cleaned","slugify","basename","extname","toLowerCase","startsWith","os","homedir"],"mappings":";;;AA0BO,MAAMA,MAAAA,GAAS,CAClBC,MAAAA,EACAC,UAAAA,GAAAA;AAGA,IAAA,MAAMC,QAAQ,CAACC,OAAAA,GAAAA;AACX,QAAA,MAAMC,UAAUH,UAAAA,CAAWI,QAAQ,CAACF,OAAAA,EAASH,OAAOM,QAAQ,CAAA;QAE5D,IAAIF,OAAAA,CAAQG,MAAM,KAAK,CAAA,EAAG;YACtB,OAAO;gBACHC,SAAAA,EAAW,IAAA;AACXC,gBAAAA,WAAAA,EAAaT,OAAOU,OAAO;gBAC3BC,UAAAA,EAAY,GAAA;AACZC,gBAAAA,OAAAA,EA
AS,EAAE;gBACXC,SAAAA,EAAW;AACf,aAAA;AACJ,QAAA;QAEA,MAAMC,SAAAA,GAAYV,OAAO,CAAC,CAAA,CAAE;AAC5B,QAAA,MAAMW,cAAAA,GAAiBf,MAAAA,CAAOM,QAAQ,CAACU,IAAI,CAACC,CAAAA,CAAAA,GAAKA,CAAAA,CAAET,SAAS,KAAKM,SAAAA,CAAUN,SAAS,CAAA;;QAGpF,MAAMU,qBAAAA,GAAwBd,QAAQe,MAAM,CAACC,CAAAA,CAAAA,GAAKA,CAAAA,CAAET,UAAU,GAAG,GAAA,CAAA;AAEjE,QAAA,IAAIO,sBAAsBX,MAAM,GAAG,KAAKP,MAAAA,CAAOqB,mBAAmB,KAAK,SAAA,EAAW;;YAE9E,OAAO;AACHb,gBAAAA,SAAAA,EAAWM,UAAUN,SAAS;AAC9BC,gBAAAA,WAAAA,EAAaM,eAAeN,WAAW;AACvCE,gBAAAA,UAAAA,EAAYG,UAAUH,UAAU;AAChCC,gBAAAA,OAAAA,EAASE,UAAUF,OAAO;AAC1BC,gBAAAA,SAAAA,EAAWC,UAAUD,SAAS;AAC9BS,gBAAAA,SAAAA,EAAWP,eAAeO,SAAS;gBACnCC,gBAAAA,EAAkBL,qBAAAA,CAAsBM,KAAK,CAAC,CAAA;AAClD,aAAA;AACJ,QAAA;QAEA,OAAO;AACHhB,YAAAA,SAAAA,EAAWM,UAAUN,SAAS;AAC9BC,YAAAA,WAAAA,EAAaM,eAAeN,WAAW;AACvCE,YAAAA,UAAAA,EAAYG,UAAUH,UAAU;AAChCC,YAAAA,OAAAA,EAASE,UAAUF,OAAO;AAC1BC,YAAAA,SAAAA,EAAWC,UAAUD,SAAS;AAC9BS,YAAAA,SAAAA,EAAWP,eAAeO;AAC9B,SAAA;AACJ,IAAA,CAAA;IAEA,MAAMG,eAAAA,GAAkB,CAACC,QAAAA,EAAyBvB,OAAAA,GAAAA;QAC9C,MAAM,EAAEM,WAAW,EAAE,GAAGiB,QAAAA;;QAGxB,MAAMC,QAAAA,GAAWC,UAAAA,CAAWnB,WAAAA,CAAYoB,IAAI,CAAA;;AAG5C,QAAA,MAAMC,gBAAgBC,kBAAAA,CAAmBJ,QAAAA,EAAUlB,YAAYuB,SAAS,EAAE7B,QAAQ8B,SAAS,CAAA;;;AAI3F,QAAA,MAAMC,WAAWC,aAAAA,CAAc1B,WAAAA,CAAY2B,gBAAgB,EAAEjC,OAAAA,EAASM,YAAYuB,SAAS,CAAA;AAE3F,QAAA,OAAOH,IAAAA,CAAKQ,IAAI,CAACP,aAAAA,EAAeI,QAAAA,GAAW,KAAA,CAAA;AAC/C,IAAA,CAAA;IAEA,OAAO;AAAEhC,QAAAA,KAAAA;AAAOuB,QAAAA;AAAgB,KAAA;AACpC;AAEA;AACA,SAASM,kBAAAA,CACLJ,QAAgB,EAChBK,SAA8B,EAC9BM,IAAU,EAAA;AAEV,IAAA,MAAMC,IAAAA,GAAOD,IAAAA,CAAKE,WAAW,EAAA,CAAGC,QAAQ,EAAA;IACxC,MAAMC,KAAAA,GAAQ,CAACJ,IAAAA,CAAKK,QAAQ,EAAA,GAAK,CAAA,EAAGF,QAAQ,EAAA;AAC5C,IAAA,MAAMG,GAAAA,GAAMN,IAAAA,CAAKO,OAAO,EAAA,CAAGJ,QAAQ,EAAA;IAEnC,OAAQT,SAAAA;QACJ,KAAK,MAAA;YACD,OAAOL,QAAAA;QACX,KAAK,MAAA;YACD,OAAOE,IAAAA,CAAKQ,IAAI,CAACV,QAAAA,EAAUY,IAAAA,CAAAA;QAC/B,KAAK,OAAA;AACD,YAAA,OAAOV,IAAAA,CAAKQ,IAAI,CAACV,QAAAA,EAAUY,IAAAA,EAAMG,KAAAA,CAAAA;QACrC,KAAK,KAAA;AACD,YAAA,OAAOb,IAAAA,CAAKQ,IAAI,CAACV,QAAAA,EAAUY,MAAMG,KAAAA,EAAOE,G
AAAA,CAAAA;AAChD;AACJ;AAEA;AACA;AACA,SAAST,aAAAA,CACLW,OAA2C,EAC3C3C,OAAuB,EACvB6B,SAA8B,EAAA;AAE9B,IAAA,MAAMe,QAAkB,EAAE;IAC1B,MAAMT,IAAAA,GAAOnC,QAAQ8B,SAAS;IAC9B,MAAMe,GAAAA,GAAM,CAACC,CAAAA,GAAcA,CAAAA,CAAER,QAAQ,EAAA,CAAGS,QAAQ,CAAC,CAAA,EAAG,GAAA,CAAA;IAEpD,KAAK,MAAMC,UAAUL,OAAAA,CAAS;QAC1B,OAAQK,MAAAA;YACJ,KAAK,MAAA;AAAQ,gBAAA;;;oBAGT,MAAMP,GAAAA,GAAMI,GAAAA,CAAIV,IAAAA,CAAKO,OAAO,EAAA,CAAA;AAC5B,oBAAA,MAAMH,KAAAA,GAAQM,GAAAA,CAAIV,IAAAA,CAAKK,QAAQ,EAAA,GAAK,CAAA,CAAA;AACpC,oBAAA,MAAMJ,OAAOD,IAAAA,CAAKE,WAAW,GAAGC,QAAQ,EAAA,CAAGjB,KAAK,CAAC,CAAA,CAAA;oBAEjD,OAAQQ,SAAAA;wBACJ,KAAK,KAAA;AAED,4BAAA;wBACJ,KAAK,OAAA;;AAEDe,4BAAAA,KAAAA,CAAMK,IAAI,CAACR,GAAAA,CAAAA;AACX,4BAAA;wBACJ,KAAK,MAAA;;AAEDG,4BAAAA,KAAAA,CAAMK,IAAI,CAAC,CAAA,EAAGV,KAAAA,CAAM,CAAC,EAAEE,GAAAA,CAAAA,CAAK,CAAA;AAC5B,4BAAA;wBACJ,KAAK,MAAA;;AAEDG,4BAAAA,KAAAA,CAAMK,IAAI,CAAC,CAAA,EAAGb,IAAAA,CAAAA,EAAOG,QAAQE,GAAAA,CAAAA,CAAK,CAAA;AAClC,4BAAA;AACR;AACA,oBAAA;AACJ,gBAAA;YACA,KAAK,MAAA;AAAQ,gBAAA;oBACT,MAAMS,KAAAA,GAAQL,GAAAA,CAAIV,IAAAA,CAAKgB,QAAQ,EAAA,CAAA;oBAC/B,MAAMC,OAAAA,GAAUP,GAAAA,CAAIV,IAAAA,CAAKkB,UAAU,EAAA,CAAA;AACnCT,oBAAAA,KAAAA,CAAMK,IAAI,CAAC,CAAA,EAAGC,KAAAA,CAAAA,EAAQE,OAAAA,CAAAA,CAAS,CAAA;AAC/B,oBAAA;AACJ,gBAAA;YACA,KAAK,SAAA;AAAW,gBAAA;AACZ,oBAAA,MAAME,UAAUC,cAAAA,CAAevD,OAAAA,CAAQwD,cAAc,EAAExD,QAAQyD,UAAU,CAAA;AACzE,oBAAA,IAAIH,OAAAA,EAAS;AACTV,wBAAAA,KAAAA,CAAMK,IAAI,CAACK,OAAAA,CAAAA;AACf,oBAAA;AACA,oBAAA;AACJ,gBAAA;AACJ;AACJ,IAAA;;AAGA,IAAA,OAAOV,MAAMV,IAAI,CAAC,GAAA,CAAA,CAAKwB,OAAO,CAAC,MAAA,EAAQ,GAAA,CAAA;AAC3C;AAEA,SAASH,cAAAA,CAAeI,IAAY,EAAEF,UAAkB,EAAA;;AAE9BE,IAAAA,IAAAA,YAAAA;;AAAtB,IAAA,MAAMC,aAAAA,GAAAA,CAAAA,IAAAA,GAAAA,CAAgBD,YAAAA,GAAAA,IAAAA,CAAKE,KAAK,CAAC,OAAA,CAAQ,CAAC,CAAA,CAAE,MAAA,IAAA,IAAtBF,YAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,YAAAA,CAAwBG,IAAI,EAAA,MAAA,IAAA,IAAA,IAAA,KAAA,MAAA,GAAA,IAAA,GAAM,EAAA;;AAGxD,IAAA,MAAMC,UAAUH,aAAAA,CACXF,OAAO,CAAC,oEAAA,EAAsE,IAC9EI,IAAI,EAAA;AAET,IAAA,IAAIC,QAAQ3D,MAAM,GAAG,KAAK2D,OAAAA,CAAQ3D,MAAM,GAAG,EAAA,EAAI;AAC3C,QA
AA,OAAO4D,OAAAA,CAAQD,OAAAA,CAAAA;AACnB,IAAA;;AAGA,IAAA,OAAOrC,IAAAA,CAAKuC,QAAQ,CAACR,UAAAA,EAAY/B,IAAAA,CAAKwC,OAAO,CAACT,UAAAA,CAAAA,CAAAA,CACzCC,OAAO,CAAC,gBAAA,EAAkB,GAAA,CAAA,CAC1BS,WAAW,EAAA;AACpB;AAEA,SAASH,QAAQL,IAAY,EAAA;AACzB,IAAA,OAAOA,KACFQ,WAAW,EAAA,CACXT,OAAO,CAAC,aAAA,EAAe;KACvBA,OAAO,CAAC,MAAA,EAAQ,GAAA,CAAA;KAChBA,OAAO,CAAC,QAAA,EAAU,EAAA,CAAA;AAClBrC,KAAAA,KAAK,CAAC,CAAA,EAAG,EAAA,CAAA;AAClB;AAEA,SAASI,WAAWX,CAAS,EAAA;IACzB,IAAIA,CAAAA,CAAEsD,UAAU,CAAC,GAAA,CAAA,EAAM;QACnB,OAAO1C,IAAAA,CAAKQ,IAAI,CAACmC,EAAAA,CAAGC,OAAO,EAAA,EAAIxD,CAAAA,CAAEO,KAAK,CAAC,CAAA,CAAA,CAAA;AAC3C,IAAA;IACA,OAAOP,CAAAA;AACX;;;;"}
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import OpenAI from 'openai';
|
|
2
|
+
import { create as create$1 } from './service.js';
|
|
3
|
+
|
|
4
|
+
/**
 * Creates a transcription instance.
 *
 * The OpenAI client (and the service wrapping it) is created lazily, so
 * purely local operations (get/set default model) never need an API key.
 *
 * options: { apiKey?, defaultModel?, openaiClient? }
 */
const create = (options = {}) => {
    let service = null;
    const getService = () => {
        if (!service) {
            // Prefer an injected client (tests); otherwise build one from the key.
            const openai = options.openaiClient ?? new OpenAI({
                apiKey: options.apiKey
            });
            service = create$1(openai);
        }
        return service;
    };
    let defaultModel = options.defaultModel ?? 'whisper-1';
    const transcribe = async (audioFile, configOptions = {}) => {
        return getService().transcribe({
            audioFile,
            config: {
                // Fix: spread FIRST so an explicitly-present `model: undefined`
                // key in configOptions can no longer clobber the resolved model
                // (the old code spread after resolving it).
                ...configOptions,
                model: configOptions.model ?? defaultModel
            }
        });
    };
    return {
        transcribe,
        supportsStreaming: (model) => getService().supportsStreaming(model),
        supportsDiarization: (model) => getService().supportsDiarization(model),
        setDefaultModel: (model) => {
            defaultModel = model;
        },
        getDefaultModel: () => defaultModel
    };
};
|
|
39
|
+
|
|
40
|
+
export { create };
|
|
41
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.js","sources":["../../src/transcription/index.ts"],"sourcesContent":["/**\n * Transcription System\n * \n * Main entry point for the transcription system. Provides a factory function\n * to create transcription instances that can transcribe audio files using\n * various OpenAI models.\n * \n * Design Philosophy:\n * - Keep transcription simple - it produces raw phonetic output\n * - The reasoning pass handles corrections with full context\n * - Model choice is user preference (quality vs cost)\n */\n\nimport OpenAI from 'openai';\nimport { TranscriptionConfig, TranscriptionResult, TranscriptionModel } from './types';\nimport * as Service from './service';\n\nexport interface TranscriptionInstance {\n // Core transcription\n transcribe(audioFile: string, options?: Partial<TranscriptionConfig>): Promise<TranscriptionResult>;\n \n // Model capabilities\n supportsStreaming(model: TranscriptionModel): boolean;\n supportsDiarization(model: TranscriptionModel): boolean;\n \n // Configuration\n setDefaultModel(model: TranscriptionModel): void;\n getDefaultModel(): TranscriptionModel;\n}\n\nexport interface CreateOptions {\n apiKey?: string;\n defaultModel?: TranscriptionModel;\n openaiClient?: OpenAI;\n}\n\nexport const create = (options: CreateOptions = {}): TranscriptionInstance => {\n // Lazy-initialize OpenAI client (only when actually needed for transcription)\n let service: Service.ServiceInstance | null = null;\n const getService = (): Service.ServiceInstance => {\n if (!service) {\n const openai = options.openaiClient ?? new OpenAI({ apiKey: options.apiKey });\n service = Service.create(openai);\n }\n return service;\n };\n \n let defaultModel: TranscriptionModel = options.defaultModel ?? 'whisper-1';\n \n const transcribe = async (\n audioFile: string, \n configOptions: Partial<TranscriptionConfig> = {}\n ): Promise<TranscriptionResult> => {\n return getService().transcribe({\n audioFile,\n config: {\n model: configOptions.model ?? 
defaultModel,\n ...configOptions,\n },\n });\n };\n \n return {\n transcribe,\n supportsStreaming: (model) => getService().supportsStreaming(model),\n supportsDiarization: (model) => getService().supportsDiarization(model),\n setDefaultModel: (model) => { defaultModel = model; },\n getDefaultModel: () => defaultModel,\n };\n};\n\n// Re-export types\nexport * from './types';\n\n"],"names":["create","options","service","getService","openai","openaiClient","OpenAI","apiKey","Service","defaultModel","transcribe","audioFile","configOptions","config","model","supportsStreaming","supportsDiarization","setDefaultModel","getDefaultModel"],"mappings":";;;AAoCO,MAAMA,MAAAA,GAAS,CAACC,OAAAA,GAAyB,EAAE,GAAA;AAWPA,IAAAA,IAAAA,qBAAAA;;AATvC,IAAA,IAAIC,OAAAA,GAA0C,IAAA;AAC9C,IAAA,MAAMC,UAAAA,GAAa,IAAA;AACf,QAAA,IAAI,CAACD,OAAAA,EAAS;AACKD,YAAAA,IAAAA,qBAAAA;YAAf,MAAMG,MAAAA,GAAAA,CAASH,wBAAAA,OAAAA,CAAQI,YAAY,cAApBJ,qBAAAA,KAAAA,MAAAA,GAAAA,qBAAAA,GAAwB,IAAIK,MAAAA,CAAO;AAAEC,gBAAAA,MAAAA,EAAQN,QAAQM;AAAO,aAAA,CAAA;YAC3EL,OAAAA,GAAUM,QAAc,CAACJ,MAAAA,CAAAA;AAC7B,QAAA;QACA,OAAOF,OAAAA;AACX,IAAA,CAAA;AAEA,IAAA,IAAIO,gBAAmCR,qBAAAA,GAAAA,OAAAA,CAAQQ,YAAY,MAAA,IAAA,IAApBR,mCAAAA,qBAAAA,GAAwB,WAAA;AAE/D,IAAA,MAAMS,UAAAA,GAAa,OACfC,SAAAA,EACAC,aAAAA,GAA8C,EAAE,GAAA;AAKjCA,QAAAA,IAAAA,oBAAAA;QAHf,OAAOT,UAAAA,EAAAA,CAAaO,UAAU,CAAC;AAC3BC,YAAAA,SAAAA;YACAE,MAAAA,EAAQ;AACJC,gBAAAA,KAAK,GAAEF,oBAAAA,GAAAA,aAAAA,CAAcE,KAAK,MAAA,IAAA,IAAnBF,kCAAAA,oBAAAA,GAAuBH,YAAAA;AAC9B,gBAAA,GAAGG;AACP;AACJ,SAAA,CAAA;AACJ,IAAA,CAAA;IAEA,OAAO;AACHF,QAAAA,UAAAA;AACAK,QAAAA,iBAAAA,EAAmB,CAACD,KAAAA,GAAUX,UAAAA,EAAAA,CAAaY,iBAAiB,CAACD,KAAAA,CAAAA;AAC7DE,QAAAA,mBAAAA,EAAqB,CAACF,KAAAA,GAAUX,UAAAA,EAAAA,CAAaa,mBAAmB,CAACF,KAAAA,CAAAA;AACjEG,QAAAA,eAAAA,EAAiB,CAACH,KAAAA,GAAAA;YAAYL,YAAAA,GAAeK,KAAAA;AAAO,QAAA,CAAA;AACpDI,QAAAA,eAAAA,EAAiB,IAAMT;AAC3B,KAAA;AACJ;;;;"}
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import { create as create$1 } from '../util/storage.js';
|
|
2
|
+
import { MODEL_CAPABILITIES } from './types.js';
|
|
3
|
+
import { getLogger } from '../logging.js';
|
|
4
|
+
|
|
5
|
+
/**
 * Creates the low-level transcription service around an OpenAI client.
 * Capability checks are lookups into MODEL_CAPABILITIES; transcribe streams
 * the audio file and forwards only the options the caller actually set.
 */
const create = (openai) => {
    const logger = getLogger();
    const storage = create$1({
        log: logger.debug
    });
    // Shared lookup: unknown models report `false` for every capability.
    const capability = (model, key) => MODEL_CAPABILITIES[model]?.[key] ?? false;
    const supportsStreaming = (model) => capability(model, 'supportsStreaming');
    const supportsDiarization = (model) => capability(model, 'supportsDiarization');
    const transcribe = async (request) => {
        const { audioFile, config } = request;
        logger.debug('Starting transcription', {
            model: config.model,
            file: audioFile
        });
        const audioStream = await storage.readStream(audioFile);
        // Only include optional request fields the caller provided.
        const extras = {};
        if (config.language) {
            extras.language = config.language;
        }
        if (config.temperature !== undefined) {
            extras.temperature = config.temperature;
        }
        if (config.prompt) {
            extras.prompt = config.prompt;
        }
        // Execute transcription, timing the API round trip.
        const startTime = Date.now();
        const response = await openai.audio.transcriptions.create({
            model: config.model,
            file: audioStream,
            response_format: config.response_format ?? 'json',
            ...extras
        });
        const duration = Date.now() - startTime;
        logger.debug('Transcription complete', {
            duration,
            model: config.model
        });
        return {
            text: response.text,
            model: config.model,
            duration
        };
    };
    return {
        transcribe,
        supportsStreaming,
        supportsDiarization
    };
};
|
|
62
|
+
|
|
63
|
+
export { create };
|
|
64
|
+
//# sourceMappingURL=service.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"service.js","sources":["../../src/transcription/service.ts"],"sourcesContent":["/**\n * Transcription Service\n * \n * Handles audio transcription using OpenAI's transcription models.\n * Keeps transcription simple - the complexity is in the reasoning pass.\n */\n\nimport OpenAI from 'openai';\nimport * as Storage from '../util/storage';\nimport { \n TranscriptionRequest, \n TranscriptionResult,\n TranscriptionModel,\n MODEL_CAPABILITIES\n} from './types';\nimport * as Logging from '../logging';\n\nexport interface ServiceInstance {\n transcribe(request: TranscriptionRequest): Promise<TranscriptionResult>;\n supportsStreaming(model: TranscriptionModel): boolean;\n supportsDiarization(model: TranscriptionModel): boolean;\n}\n\n// Alias for backwards compatibility\nexport type TranscriptionService = ServiceInstance;\n\nexport const create = (openai: OpenAI): ServiceInstance => {\n const logger = Logging.getLogger();\n const storage = Storage.create({ log: logger.debug });\n \n const supportsStreaming = (model: TranscriptionModel): boolean => {\n return MODEL_CAPABILITIES[model]?.supportsStreaming ?? false;\n };\n \n const supportsDiarization = (model: TranscriptionModel): boolean => {\n return MODEL_CAPABILITIES[model]?.supportsDiarization ?? false;\n };\n \n const transcribe = async (request: TranscriptionRequest): Promise<TranscriptionResult> => {\n const { audioFile, config } = request;\n \n logger.debug('Starting transcription', { model: config.model, file: audioFile });\n \n const audioStream = await storage.readStream(audioFile);\n \n // Execute transcription\n const startTime = Date.now();\n const response = await openai.audio.transcriptions.create({\n model: config.model,\n file: audioStream,\n response_format: config.response_format ?? 
'json',\n ...(config.language && { language: config.language }),\n ...(config.temperature !== undefined && { temperature: config.temperature }),\n ...(config.prompt && { prompt: config.prompt }),\n });\n const duration = Date.now() - startTime;\n \n logger.debug('Transcription complete', { duration, model: config.model });\n \n // Handle the response\n return {\n text: response.text,\n model: config.model,\n duration,\n };\n };\n \n return {\n transcribe,\n supportsStreaming,\n supportsDiarization,\n };\n};\n"],"names":["create","openai","logger","Logging","storage","Storage","log","debug","supportsStreaming","model","MODEL_CAPABILITIES","supportsDiarization","transcribe","request","config","audioFile","file","audioStream","readStream","startTime","Date","now","response","audio","transcriptions","response_format","language","temperature","undefined","prompt","duration","text"],"mappings":";;;;AA0BO,MAAMA,SAAS,CAACC,MAAAA,GAAAA;IACnB,MAAMC,MAAAA,GAASC,SAAiB,EAAA;IAChC,MAAMC,OAAAA,GAAUC,QAAc,CAAC;AAAEC,QAAAA,GAAAA,EAAKJ,OAAOK;AAAM,KAAA,CAAA;AAEnD,IAAA,MAAMC,oBAAoB,CAACC,KAAAA,GAAAA;;AAChBC,QAAAA,IAAAA,yBAAAA;QAAP,OAAA,CAAA,IAAA,GAAA,CAAOA,yBAAAA,GAAAA,kBAAkB,CAACD,KAAAA,CAAM,cAAzBC,yBAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,yBAAAA,CAA2BF,iBAAiB,MAAA,IAAA,IAAA,IAAA,KAAA,MAAA,GAAA,IAAA,GAAI,KAAA;AAC3D,IAAA,CAAA;AAEA,IAAA,MAAMG,sBAAsB,CAACF,KAAAA,GAAAA;;AAClBC,QAAAA,IAAAA,yBAAAA;QAAP,OAAA,CAAA,IAAA,GAAA,CAAOA,yBAAAA,GAAAA,kBAAkB,CAACD,KAAAA,CAAM,cAAzBC,yBAAAA,KAAAA,MAAAA,GAAAA,MAAAA,GAAAA,yBAAAA,CAA2BC,mBAAmB,MAAA,IAAA,IAAA,IAAA,KAAA,MAAA,GAAA,IAAA,GAAI,KAAA;AAC7D,IAAA,CAAA;AAEA,IAAA,MAAMC,aAAa,OAAOC,OAAAA,GAAAA;AAYDC,QAAAA,IAAAA,uBAAAA;AAXrB,QAAA,MAAM,EAAEC,SAAS,EAAED,MAAM,EAAE,GAAGD,OAAAA;QAE9BX,MAAAA,CAAOK,KAAK,CAAC,wBAAA,EAA0B;AAAEE,YAAAA,KAAAA,EAAOK,OAAOL,KAAK;YAAEO,IAAAA,EAAMD;AAAU,SAAA,CAAA;AAE9E,QAAA,MAAME,WAAAA,GAAc,MAAMb,OAAAA,CAAQc,UAAU,CAACH,SAAAA,CAAAA;;QAG7C,MAAMI,SAAAA,GAAYC,KAAKC,GAAG,EAAA;QAC1B,MAAMC,QAAAA,GAAW,MAAMrB,MAAAA,CAAOsB,KAAK,CAACC,cAAc,CAACxB,MAAM,C
AAC;AACtDS,YAAAA,KAAAA,EAAOK,OAAOL,KAAK;YACnBO,IAAAA,EAAMC,WAAAA;AACNQ,YAAAA,eAAe,GAAEX,uBAAAA,GAAAA,MAAAA,CAAOW,eAAe,MAAA,IAAA,IAAtBX,qCAAAA,uBAAAA,GAA0B,MAAA;YAC3C,GAAIA,MAAAA,CAAOY,QAAQ,IAAI;AAAEA,gBAAAA,QAAAA,EAAUZ,OAAOY;aAAU;YACpD,GAAIZ,MAAAA,CAAOa,WAAW,KAAKC,SAAAA,IAAa;AAAED,gBAAAA,WAAAA,EAAab,OAAOa;aAAa;YAC3E,GAAIb,MAAAA,CAAOe,MAAM,IAAI;AAAEA,gBAAAA,MAAAA,EAAQf,OAAOe;;AAC1C,SAAA,CAAA;QACA,MAAMC,QAAAA,GAAWV,IAAAA,CAAKC,GAAG,EAAA,GAAKF,SAAAA;QAE9BjB,MAAAA,CAAOK,KAAK,CAAC,wBAAA,EAA0B;AAAEuB,YAAAA,QAAAA;AAAUrB,YAAAA,KAAAA,EAAOK,OAAOL;AAAM,SAAA,CAAA;;QAGvE,OAAO;AACHsB,YAAAA,IAAAA,EAAMT,SAASS,IAAI;AACnBtB,YAAAA,KAAAA,EAAOK,OAAOL,KAAK;AACnBqB,YAAAA;AACJ,SAAA;AACJ,IAAA,CAAA;IAEA,OAAO;AACHlB,QAAAA,UAAAA;AACAJ,QAAAA,iBAAAA;AACAG,QAAAA;AACJ,KAAA;AACJ;;;;"}
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
/**
 * Transcription System Types
 *
 * Supports multiple OpenAI transcription models with different capabilities.
 * The transcription service produces raw phonetic output that will be
 * corrected by the full reasoning pass.
 */
// All current models share the same 25 MB upload limit.
const MAX_UPLOAD_BYTES = 25 * 1024 * 1024;
// Helper to build one capability record.
const capabilities = (supportsStreaming, supportsDiarization) => ({
    supportsStreaming,
    supportsDiarization,
    maxFileSize: MAX_UPLOAD_BYTES
});
const MODEL_CAPABILITIES = {
    'whisper-1': capabilities(false, false),
    'gpt-4o-mini-transcribe': capabilities(true, false),
    'gpt-4o-transcribe': capabilities(true, false),
    'gpt-4o-transcribe-diarize': capabilities(true, true)
};
|
|
29
|
+
|
|
30
|
+
export { MODEL_CAPABILITIES };
|
|
31
|
+
//# sourceMappingURL=types.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"types.js","sources":["../../src/transcription/types.ts"],"sourcesContent":["/**\n * Transcription System Types\n * \n * Supports multiple OpenAI transcription models with different capabilities.\n * The transcription service produces raw phonetic output that will be\n * corrected by the full reasoning pass.\n */\n\nexport type TranscriptionModel = \n | 'whisper-1'\n | 'gpt-4o-mini-transcribe'\n | 'gpt-4o-transcribe'\n | 'gpt-4o-transcribe-diarize';\n\nexport interface TranscriptionConfig {\n model: TranscriptionModel;\n language?: string;\n prompt?: string;\n response_format?: 'json' | 'text' | 'verbose_json' | 'srt' | 'vtt';\n temperature?: number;\n streaming?: boolean;\n}\n\nexport interface TranscriptionRequest {\n audioFile: string; // Path to audio file\n config: TranscriptionConfig;\n contextPrompt?: string; // Built from known entities (limited to 224 tokens)\n}\n\nexport interface TranscriptionSegment {\n start: number;\n end: number;\n text: string;\n speaker?: string; // For diarization\n}\n\nexport interface TranscriptionResult {\n text: string;\n model: string;\n segments?: TranscriptionSegment[];\n duration?: number;\n language?: string;\n}\n\nexport interface ModelCapabilities {\n supportsStreaming: boolean;\n supportsDiarization: boolean;\n maxFileSize: number;\n}\n\nexport const MODEL_CAPABILITIES: Record<TranscriptionModel, ModelCapabilities> = {\n 'whisper-1': {\n supportsStreaming: false,\n supportsDiarization: false,\n maxFileSize: 25 * 1024 * 1024, // 25 MB\n },\n 'gpt-4o-mini-transcribe': {\n supportsStreaming: true,\n supportsDiarization: false,\n maxFileSize: 25 * 1024 * 1024,\n },\n 'gpt-4o-transcribe': {\n supportsStreaming: true,\n supportsDiarization: false,\n maxFileSize: 25 * 1024 * 1024,\n },\n 'gpt-4o-transcribe-diarize': {\n supportsStreaming: true,\n supportsDiarization: true,\n maxFileSize: 25 * 1024 * 1024,\n 
},\n};\n\n"],"names":["MODEL_CAPABILITIES","supportsStreaming","supportsDiarization","maxFileSize"],"mappings":"AAAA;;;;;;UAkDaA,kBAAAA,GAAoE;IAC7E,WAAA,EAAa;QACTC,iBAAAA,EAAmB,KAAA;QACnBC,mBAAAA,EAAqB,KAAA;AACrBC,QAAAA,WAAAA,EAAa,KAAK,IAAA,GAAO;AAC7B,KAAA;IACA,wBAAA,EAA0B;QACtBF,iBAAAA,EAAmB,IAAA;QACnBC,mBAAAA,EAAqB,KAAA;AACrBC,QAAAA,WAAAA,EAAa,KAAK,IAAA,GAAO;AAC7B,KAAA;IACA,mBAAA,EAAqB;QACjBF,iBAAAA,EAAmB,IAAA;QACnBC,mBAAAA,EAAqB,KAAA;AACrBC,QAAAA,WAAAA,EAAa,KAAK,IAAA,GAAO;AAC7B,KAAA;IACA,2BAAA,EAA6B;QACzBF,iBAAAA,EAAmB,IAAA;QACnBC,mBAAAA,EAAqB,IAAA;AACrBC,QAAAA,WAAAA,EAAa,KAAK,IAAA,GAAO;AAC7B;AACJ;;;;"}
|