@redaksjon/protokoll-engine 0.1.4 → 0.1.5-dev.20260218064546.576a29c

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index33.js CHANGED
@@ -1,6 +1,6 @@
1
1
  import * as Context from '@redaksjon/context';
2
2
  import { create as create$1 } from './index6.js';
3
- import { create as create$2 } from './index59.js';
3
+ import { create as create$2 } from './index53.js';
4
4
  import { create as create$3 } from './index11.js';
5
5
  import { create as create$4 } from './index3.js';
6
6
  import { create as create$5 } from './index2.js';
package/dist/index34.js CHANGED
@@ -1,8 +1,8 @@
1
1
  import { getLogger } from './index47.js';
2
2
  import { create as create$1 } from './index13.js';
3
3
  import { create as create$2 } from './index14.js';
4
- import { transcribeAudio } from './index53.js';
5
- import { stringifyJSON } from './index54.js';
4
+ import { transcribeAudio } from './index54.js';
5
+ import { stringifyJSON } from './index55.js';
6
6
  import path__default from 'node:path';
7
7
  import { create as create$4 } from './index5.js';
8
8
  import { create as create$3 } from './index2.js';
package/dist/index35.js CHANGED
@@ -1,9 +1,9 @@
1
1
  import { getLogger } from './index47.js';
2
2
  import { create as create$1 } from './index13.js';
3
- import { create as create$2 } from './index55.js';
4
- import { create as create$3 } from './index56.js';
5
- import { create as create$4 } from './index57.js';
6
- import { stringifyJSON } from './index54.js';
3
+ import { create as create$2 } from './index56.js';
4
+ import { create as create$3 } from './index57.js';
5
+ import { create as create$4 } from './index58.js';
6
+ import { stringifyJSON } from './index55.js';
7
7
  import path from 'path';
8
8
 
9
9
  const create = (config, _operator) => {
package/dist/index36.js CHANGED
@@ -1,7 +1,7 @@
1
1
  import { getLogger } from './index47.js';
2
2
  import { create as create$3 } from './index14.js';
3
3
  import { create as create$1 } from './index13.js';
4
- import { create as create$2 } from './index58.js';
4
+ import { create as create$2 } from './index59.js';
5
5
  import { DEFAULT_INTERMEDIATE_DIRECTORY } from './index18.js';
6
6
  import path__default from 'node:path';
7
7
 
package/dist/index53.js CHANGED
@@ -1,51 +1,8 @@
1
- import { OpenAI } from 'openai';
2
- import { create } from './index13.js';
3
- import { getLogger } from './index47.js';
4
- import { DEFAULT_TRANSCRIPTION_MODEL } from './index18.js';
1
+ import { create as create$1 } from './index60.js';
5
2
 
6
- class OpenAIError extends Error {
7
- constructor(message) {
8
- super(message);
9
- this.name = "OpenAIError";
10
- }
11
- }
12
- async function transcribeAudio(filePath, options = {}) {
13
- const logger = getLogger();
14
- const storage$1 = create({ log: logger.debug });
15
- try {
16
- const apiKey = process.env.OPENAI_API_KEY;
17
- if (!apiKey) {
18
- throw new OpenAIError("OPENAI_API_KEY environment variable is not set");
19
- }
20
- const openai = new OpenAI({
21
- apiKey
22
- });
23
- const model = options.model || DEFAULT_TRANSCRIPTION_MODEL;
24
- const fileName = filePath.split("/").pop() || filePath;
25
- logger.debug("Transcribing: %s (full path: %s)", fileName, filePath);
26
- const startTime = Date.now();
27
- const audioStream = await storage$1.readStream(filePath);
28
- const transcription = await openai.audio.transcriptions.create({
29
- model,
30
- file: audioStream,
31
- response_format: "json"
32
- });
33
- if (!transcription) {
34
- throw new OpenAIError("No transcription received from OpenAI");
35
- }
36
- const duration = ((Date.now() - startTime) / 1e3).toFixed(1);
37
- logger.info("%s (%ss, %d chars)", model, duration, transcription.text?.length || 0);
38
- if (options.debug && options.debugFile) {
39
- await storage$1.writeFile(options.debugFile, JSON.stringify(transcription, null, 2), "utf8");
40
- logger.debug("Wrote debug file to %s", options.debugFile);
41
- }
42
- logger.debug("Received transcription from OpenAI: %s", transcription);
43
- return transcription;
44
- } catch (error) {
45
- logger.error("Error transcribing audio file: %s %s", error.message, error.stack);
46
- throw new OpenAIError(`Failed to transcribe audio: ${error.message}`);
47
- }
48
- }
3
+ const create = (config) => {
4
+ return create$1(config);
5
+ };
49
6
 
50
- export { OpenAIError, transcribeAudio };
7
+ export { create };
51
8
  //# sourceMappingURL=index53.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"index53.js","sources":["../src/util/openai.ts"],"sourcesContent":["import { OpenAI } from 'openai';\nimport { ChatCompletionCreateParamsNonStreaming, ChatCompletionMessageParam } from 'openai/resources/chat/completions';\nimport * as Storage from '@/util/storage';\nimport { getLogger } from '@/logging';\nimport { DEFAULT_MODEL, DEFAULT_TRANSCRIPTION_MODEL } from '@/constants';\n\nexport interface Transcription {\n text: string;\n}\n\nexport class OpenAIError extends Error {\n constructor(message: string) {\n super(message);\n this.name = 'OpenAIError';\n }\n}\n\n\nexport async function createCompletion(messages: ChatCompletionMessageParam[], options: { responseFormat?: any, model?: string, reasoningLevel?: 'none' | 'low' | 'medium' | 'high', maxTokens?: number, debug?: boolean, debugFile?: string, reason?: string } = {}): Promise<string | any> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n const openai = new OpenAI({\n apiKey: apiKey,\n });\n\n const model = options.model || DEFAULT_MODEL;\n \n // Check if model supports reasoning_effort\n const supportsReasoning = model.includes('gpt-5') || \n model.includes('o1') || model.includes('o3');\n const isReasoningCall = supportsReasoning && options.reasoningLevel && options.reasoningLevel !== 'none';\n \n logger.debug('Sending prompt to OpenAI: %j', messages);\n\n const startTime = Date.now();\n \n const requestParams: Record<string, unknown> = {\n model,\n messages,\n max_completion_tokens: options.maxTokens || 10000,\n response_format: options.responseFormat,\n };\n \n if (isReasoningCall) {\n requestParams.reasoning_effort = options.reasoningLevel;\n logger.debug('Using reasoning_effort: %s', options.reasoningLevel);\n }\n \n const completion = await openai.chat.completions.create(\n requestParams as 
unknown as ChatCompletionCreateParamsNonStreaming\n );\n const duration = ((Date.now() - startTime) / 1000).toFixed(1);\n\n // Log token usage with reason if provided\n const usage = completion.usage;\n const reasonSuffix = options.reason ? ` - ${options.reason}` : '';\n if (usage) {\n logger.info('%s (%ss, %d→%d tokens)%s', \n model, duration, usage.prompt_tokens, usage.completion_tokens, reasonSuffix);\n } else {\n logger.info('%s (%ss)%s', model, duration, reasonSuffix);\n }\n\n if (options.debug && options.debugFile) {\n await storage.writeFile(options.debugFile, JSON.stringify(completion, null, 2), 'utf8');\n logger.debug('Wrote debug file to %s', options.debugFile);\n }\n\n const response = completion.choices[0]?.message?.content?.trim();\n if (!response) {\n // Log the full completion object to help debug\n logger.error('Empty response from OpenAI. Full completion object: %j', completion);\n throw new OpenAIError('No response received from OpenAI');\n }\n\n logger.debug('Received response from OpenAI: %s', response);\n if (options.responseFormat) {\n return JSON.parse(response);\n } else {\n return response;\n }\n\n } catch (error: any) {\n logger.error('Error calling OpenAI API: %s %s', error.message, error.stack);\n throw new OpenAIError(`Failed to create completion: ${error.message}`);\n }\n}\n\nexport async function transcribeAudio(filePath: string, options: { model?: string, debug?: boolean, debugFile?: string } = {}): Promise<Transcription> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n const openai = new OpenAI({\n apiKey: apiKey,\n });\n\n const model = options.model || DEFAULT_TRANSCRIPTION_MODEL;\n const fileName = filePath.split('/').pop() || filePath;\n logger.debug('Transcribing: %s (full path: %s)', fileName, filePath);\n\n const startTime = 
Date.now();\n const audioStream = await storage.readStream(filePath);\n const transcription = await openai.audio.transcriptions.create({\n model,\n file: audioStream,\n response_format: \"json\",\n });\n \n if (!transcription) {\n throw new OpenAIError('No transcription received from OpenAI');\n }\n \n const duration = ((Date.now() - startTime) / 1000).toFixed(1);\n logger.info('%s (%ss, %d chars)', model, duration, transcription.text?.length || 0);\n\n if (options.debug && options.debugFile) {\n await storage.writeFile(options.debugFile, JSON.stringify(transcription, null, 2), 'utf8');\n logger.debug('Wrote debug file to %s', options.debugFile);\n }\n\n logger.debug('Received transcription from OpenAI: %s', transcription);\n return transcription;\n\n } catch (error: any) {\n logger.error('Error transcribing audio file: %s %s', error.message, error.stack);\n throw new OpenAIError(`Failed to transcribe audio: ${error.message}`);\n }\n}\n"],"names":["storage","Storage.create"],"mappings":";;;;;AAUO,MAAM,oBAAoB,KAAA,CAAM;AAAA,EACnC,YAAY,OAAA,EAAiB;AACzB,IAAA,KAAA,CAAM,OAAO,CAAA;AACb,IAAA,IAAA,CAAK,IAAA,GAAO,aAAA;AAAA,EAChB;AACJ;AA+EA,eAAsB,eAAA,CAAgB,QAAA,EAAkB,OAAA,GAAmE,EAAC,EAA2B;AACnJ,EAAA,MAAM,SAAS,SAAA,EAAU;AACzB,EAAA,MAAMA,YAAUC,MAAQ,CAAO,EAAE,GAAA,EAAK,MAAA,CAAO,OAAO,CAAA;AACpD,EAAA,IAAI;AACA,IAAA,MAAM,MAAA,GAAS,QAAQ,GAAA,CAAI,cAAA;AAC3B,IAAA,IAAI,CAAC,MAAA,EAAQ;AACT,MAAA,MAAM,IAAI,YAAY,gDAAgD,CAAA;AAAA,IAC1E;AAEA,IAAA,MAAM,MAAA,GAAS,IAAI,MAAA,CAAO;AAAA,MACtB;AAAA,KACH,CAAA;AAED,IAAA,MAAM,KAAA,GAAQ,QAAQ,KAAA,IAAS,2BAAA;AAC/B,IAAA,MAAM,WAAW,QAAA,CAAS,KAAA,CAAM,GAAG,CAAA,CAAE,KAAI,IAAK,QAAA;AAC9C,IAAA,MAAA,CAAO,KAAA,CAAM,kCAAA,EAAoC,QAAA,EAAU,QAAQ,CAAA;AAEnE,IAAA,MAAM,SAAA,GAAY,KAAK,GAAA,EAAI;AAC3B,IAAA,MAAM,WAAA,GAAc,MAAMD,SAAA,CAAQ,UAAA,CAAW,QAAQ,CAAA;AACrD,IAAA,MAAM,aAAA,GAAgB,MAAM,MAAA,CAAO,KAAA,CAAM,eAAe,MAAA,CAAO;AAAA,MAC3D,KAAA;AAAA,MACA,IAAA,EAAM,WAAA;AAAA,MACN,eAAA,EAAiB;AAAA,KACpB,CAAA;AAED,IAAA,IAAI,CAAC,aAAA,EAAe;AAChB,MAAA,MAAM,IAAI,YAAY,uCAAuC,CAAA;
AAAA,IACjE;AAEA,IAAA,MAAM,aAAa,IAAA,CAAK,GAAA,KAAQ,SAAA,IAAa,GAAA,EAAM,QAAQ,CAAC,CAAA;AAC5D,IAAA,MAAA,CAAO,KAAK,oBAAA,EAAsB,KAAA,EAAO,UAAU,aAAA,CAAc,IAAA,EAAM,UAAU,CAAC,CAAA;AAElF,IAAA,IAAI,OAAA,CAAQ,KAAA,IAAS,OAAA,CAAQ,SAAA,EAAW;AACpC,MAAA,MAAMA,SAAA,CAAQ,SAAA,CAAU,OAAA,CAAQ,SAAA,EAAW,IAAA,CAAK,UAAU,aAAA,EAAe,IAAA,EAAM,CAAC,CAAA,EAAG,MAAM,CAAA;AACzF,MAAA,MAAA,CAAO,KAAA,CAAM,wBAAA,EAA0B,OAAA,CAAQ,SAAS,CAAA;AAAA,IAC5D;AAEA,IAAA,MAAA,CAAO,KAAA,CAAM,0CAA0C,aAAa,CAAA;AACpE,IAAA,OAAO,aAAA;AAAA,EAEX,SAAS,KAAA,EAAY;AACjB,IAAA,MAAA,CAAO,KAAA,CAAM,sCAAA,EAAwC,KAAA,CAAM,OAAA,EAAS,MAAM,KAAK,CAAA;AAC/E,IAAA,MAAM,IAAI,WAAA,CAAY,CAAA,4BAAA,EAA+B,KAAA,CAAM,OAAO,CAAA,CAAE,CAAA;AAAA,EACxE;AACJ;;;;"}
1
+ {"version":3,"file":"index53.js","sources":["../src/out/index.ts"],"sourcesContent":["/**\n * Output Management System\n *\n * Main entry point for the output management system. Handles intermediate\n * files and final output destinations.\n */\n\nimport { OutputConfig, OutputPaths, IntermediateFiles, RawTranscriptData } from './types';\nimport * as Manager from './manager';\nimport * as Metadata from '../util/metadata';\n\nexport interface OutputInstance {\n createOutputPaths(\n audioFile: string,\n routedDestination: string,\n hash: string,\n date: Date\n ): OutputPaths;\n ensureDirectories(paths: OutputPaths): Promise<void>;\n writeIntermediate(\n paths: OutputPaths,\n type: keyof IntermediateFiles,\n content: unknown\n ): Promise<string>;\n /**\n * Write the raw Whisper transcript to the .transcript/ directory alongside final output.\n * This enables compare and reanalyze workflows.\n */\n writeRawTranscript(paths: OutputPaths, data: RawTranscriptData): Promise<string>;\n writeTranscript(paths: OutputPaths, content: string, metadata?: Metadata.TranscriptMetadata): Promise<string>;\n /**\n * Read a previously stored raw transcript from the .transcript/ directory.\n * Returns null if no raw transcript exists.\n */\n readRawTranscript(finalOutputPath: string): Promise<RawTranscriptData | null>;\n cleanIntermediates(paths: OutputPaths): Promise<void>;\n}\n\nexport const create = (config: OutputConfig): OutputInstance => {\n return Manager.create(config);\n};\n\nexport const DEFAULT_OUTPUT_CONFIG: OutputConfig = {\n intermediateDir: './output/protokoll',\n keepIntermediates: true,\n timestampFormat: 'YYMMDD-HHmm',\n};\n\n// Re-export types\nexport * from './types';\n"],"names":["Manager.create"],"mappings":";;AAsCO,MAAM,MAAA,GAAS,CAAC,MAAA,KAAyC;AAC5D,EAAA,OAAOA,SAAe,MAAM,CAAA;AAChC;;;;"}
package/dist/index54.js CHANGED
@@ -1,39 +1,51 @@
1
- const stringifyJSON = function(obj) {
2
- const arrOfKeyVals = [];
3
- const arrVals = [];
4
- let objKeys = [];
5
- if (typeof obj === "number" || typeof obj === "boolean" || obj === null)
6
- return "" + obj;
7
- else if (typeof obj === "string")
8
- return '"' + obj + '"';
9
- else if (Array.isArray(obj)) {
10
- if (obj[0] === void 0)
11
- return "[]";
12
- else {
13
- obj.forEach(function(el) {
14
- arrVals.push(stringifyJSON(el));
15
- });
16
- return "[" + arrVals + "]";
1
+ import { OpenAI } from 'openai';
2
+ import { create } from './index13.js';
3
+ import { getLogger } from './index47.js';
4
+ import { DEFAULT_TRANSCRIPTION_MODEL } from './index18.js';
5
+
6
+ class OpenAIError extends Error {
7
+ constructor(message) {
8
+ super(message);
9
+ this.name = "OpenAIError";
10
+ }
11
+ }
12
+ async function transcribeAudio(filePath, options = {}) {
13
+ const logger = getLogger();
14
+ const storage$1 = create({ log: logger.debug });
15
+ try {
16
+ const apiKey = process.env.OPENAI_API_KEY;
17
+ if (!apiKey) {
18
+ throw new OpenAIError("OPENAI_API_KEY environment variable is not set");
17
19
  }
18
- } else if (obj instanceof Object) {
19
- objKeys = Object.keys(obj);
20
- objKeys.forEach(function(key) {
21
- const keyOut = '"' + key + '":';
22
- const keyValOut = obj[key];
23
- if (keyValOut instanceof Function || keyValOut === void 0)
24
- arrOfKeyVals.push("");
25
- else if (typeof keyValOut === "string")
26
- arrOfKeyVals.push(keyOut + '"' + keyValOut + '"');
27
- else if (typeof keyValOut === "boolean" || typeof keyValOut === "number" || keyValOut === null)
28
- arrOfKeyVals.push(keyOut + keyValOut);
29
- else if (keyValOut instanceof Object) {
30
- arrOfKeyVals.push(keyOut + stringifyJSON(keyValOut));
31
- }
20
+ const openai = new OpenAI({
21
+ apiKey
22
+ });
23
+ const model = options.model || DEFAULT_TRANSCRIPTION_MODEL;
24
+ const fileName = filePath.split("/").pop() || filePath;
25
+ logger.debug("Transcribing: %s (full path: %s)", fileName, filePath);
26
+ const startTime = Date.now();
27
+ const audioStream = await storage$1.readStream(filePath);
28
+ const transcription = await openai.audio.transcriptions.create({
29
+ model,
30
+ file: audioStream,
31
+ response_format: "json"
32
32
  });
33
- return "{" + arrOfKeyVals + "}";
33
+ if (!transcription) {
34
+ throw new OpenAIError("No transcription received from OpenAI");
35
+ }
36
+ const duration = ((Date.now() - startTime) / 1e3).toFixed(1);
37
+ logger.info("%s (%ss, %d chars)", model, duration, transcription.text?.length || 0);
38
+ if (options.debug && options.debugFile) {
39
+ await storage$1.writeFile(options.debugFile, JSON.stringify(transcription, null, 2), "utf8");
40
+ logger.debug("Wrote debug file to %s", options.debugFile);
41
+ }
42
+ logger.debug("Received transcription from OpenAI: %s", transcription);
43
+ return transcription;
44
+ } catch (error) {
45
+ logger.error("Error transcribing audio file: %s %s", error.message, error.stack);
46
+ throw new OpenAIError(`Failed to transcribe audio: ${error.message}`);
34
47
  }
35
- return "";
36
- };
48
+ }
37
49
 
38
- export { stringifyJSON };
50
+ export { OpenAIError, transcribeAudio };
39
51
  //# sourceMappingURL=index54.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"index54.js","sources":["../src/util/general.ts"],"sourcesContent":["// Utility function for deep merging two objects.\nexport function deepMerge(target: any, source: any): any {\n for (const key in source) {\n if (Object.prototype.hasOwnProperty.call(source, key)) {\n // Block prototype-polluting keys\n if (key === '__proto__' || key === 'constructor') {\n continue;\n }\n if (source[key] && typeof source[key] === 'object' && !Array.isArray(source[key])) {\n if (!target[key]) {\n target[key] = {};\n }\n deepMerge(target[key], source[key]);\n } else {\n target[key] = source[key];\n }\n }\n }\n return target;\n}\n\n//Recursive implementation of jSON.stringify;\nexport const stringifyJSON = function (obj: any): string {\n\n const arrOfKeyVals: string[] = [];\n const arrVals: string[] = [];\n let objKeys: string[] = [];\n\n /*********CHECK FOR PRIMITIVE TYPES**********/\n if (typeof obj === 'number' || typeof obj === 'boolean' || obj === null)\n return '' + obj;\n else if (typeof obj === 'string')\n return '\"' + obj + '\"';\n\n /*********CHECK FOR ARRAY**********/\n else if (Array.isArray(obj)) {\n //check for empty array\n if (obj[0] === undefined)\n return '[]';\n else {\n obj.forEach(function (el) {\n arrVals.push(stringifyJSON(el));\n });\n return '[' + arrVals + ']';\n }\n }\n /*********CHECK FOR OBJECT**********/\n else if (obj instanceof Object) {\n //get object keys\n objKeys = Object.keys(obj);\n //set key output;\n objKeys.forEach(function (key) {\n const keyOut = '\"' + key + '\":';\n const keyValOut = obj[key];\n //skip functions and undefined properties\n if (keyValOut instanceof Function || keyValOut === undefined)\n arrOfKeyVals.push('');\n else if (typeof keyValOut === 'string')\n arrOfKeyVals.push(keyOut + '\"' + keyValOut + '\"');\n else if (typeof keyValOut === 'boolean' || typeof keyValOut === 'number' || keyValOut === null)\n arrOfKeyVals.push(keyOut + keyValOut);\n //check for nested objects, call recursively until no more 
objects\n else if (keyValOut instanceof Object) {\n arrOfKeyVals.push(keyOut + stringifyJSON(keyValOut));\n }\n });\n return '{' + arrOfKeyVals + '}';\n }\n return '';\n};"],"names":[],"mappings":"AAsBO,MAAM,aAAA,GAAgB,SAAU,GAAA,EAAkB;AAErD,EAAA,MAAM,eAAyB,EAAC;AAChC,EAAA,MAAM,UAAoB,EAAC;AAC3B,EAAA,IAAI,UAAoB,EAAC;AAGzB,EAAA,IAAI,OAAO,GAAA,KAAQ,QAAA,IAAY,OAAO,GAAA,KAAQ,aAAa,GAAA,KAAQ,IAAA;AAC/D,IAAA,OAAO,EAAA,GAAK,GAAA;AAAA,OAAA,IACP,OAAO,GAAA,KAAQ,QAAA;AACpB,IAAA,OAAO,MAAM,GAAA,GAAM,GAAA;AAAA,OAAA,IAGd,KAAA,CAAM,OAAA,CAAQ,GAAG,CAAA,EAAG;AAEzB,IAAA,IAAI,GAAA,CAAI,CAAC,CAAA,KAAM,MAAA;AACX,MAAA,OAAO,IAAA;AAAA,SACN;AACD,MAAA,GAAA,CAAI,OAAA,CAAQ,SAAU,EAAA,EAAI;AACtB,QAAA,OAAA,CAAQ,IAAA,CAAK,aAAA,CAAc,EAAE,CAAC,CAAA;AAAA,MAClC,CAAC,CAAA;AACD,MAAA,OAAO,MAAM,OAAA,GAAU,GAAA;AAAA,IAC3B;AAAA,EACJ,CAAA,MAAA,IAES,eAAe,MAAA,EAAQ;AAE5B,IAAA,OAAA,GAAU,MAAA,CAAO,KAAK,GAAG,CAAA;AAEzB,IAAA,OAAA,CAAQ,OAAA,CAAQ,SAAU,GAAA,EAAK;AAC3B,MAAA,MAAM,MAAA,GAAS,MAAM,GAAA,GAAM,IAAA;AAC3B,MAAA,MAAM,SAAA,GAAY,IAAI,GAAG,CAAA;AAEzB,MAAA,IAAI,SAAA,YAAqB,YAAY,SAAA,KAAc,MAAA;AAC/C,QAAA,YAAA,CAAa,KAAK,EAAE,CAAA;AAAA,WAAA,IACf,OAAO,SAAA,KAAc,QAAA;AAC1B,QAAA,YAAA,CAAa,IAAA,CAAK,MAAA,GAAS,GAAA,GAAM,SAAA,GAAY,GAAG,CAAA;AAAA,WAAA,IAC3C,OAAO,SAAA,KAAc,SAAA,IAAa,OAAO,SAAA,KAAc,YAAY,SAAA,KAAc,IAAA;AACtF,QAAA,YAAA,CAAa,IAAA,CAAK,SAAS,SAAS,CAAA;AAAA,WAAA,IAE/B,qBAAqB,MAAA,EAAQ;AAClC,QAAA,YAAA,CAAa,IAAA,CAAK,MAAA,GAAS,aAAA,CAAc,SAAS,CAAC,CAAA;AAAA,MACvD;AAAA,IACJ,CAAC,CAAA;AACD,IAAA,OAAO,MAAM,YAAA,GAAe,GAAA;AAAA,EAChC;AACA,EAAA,OAAO,EAAA;AACX;;;;"}
1
+ {"version":3,"file":"index54.js","sources":["../src/util/openai.ts"],"sourcesContent":["import { OpenAI } from 'openai';\nimport { ChatCompletionCreateParamsNonStreaming, ChatCompletionMessageParam } from 'openai/resources/chat/completions';\nimport * as Storage from '@/util/storage';\nimport { getLogger } from '@/logging';\nimport { DEFAULT_MODEL, DEFAULT_TRANSCRIPTION_MODEL } from '@/constants';\n\nexport interface Transcription {\n text: string;\n}\n\nexport class OpenAIError extends Error {\n constructor(message: string) {\n super(message);\n this.name = 'OpenAIError';\n }\n}\n\n\nexport async function createCompletion(messages: ChatCompletionMessageParam[], options: { responseFormat?: any, model?: string, reasoningLevel?: 'none' | 'low' | 'medium' | 'high', maxTokens?: number, debug?: boolean, debugFile?: string, reason?: string } = {}): Promise<string | any> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n const openai = new OpenAI({\n apiKey: apiKey,\n });\n\n const model = options.model || DEFAULT_MODEL;\n \n // Check if model supports reasoning_effort\n const supportsReasoning = model.includes('gpt-5') || \n model.includes('o1') || model.includes('o3');\n const isReasoningCall = supportsReasoning && options.reasoningLevel && options.reasoningLevel !== 'none';\n \n logger.debug('Sending prompt to OpenAI: %j', messages);\n\n const startTime = Date.now();\n \n const requestParams: Record<string, unknown> = {\n model,\n messages,\n max_completion_tokens: options.maxTokens || 10000,\n response_format: options.responseFormat,\n };\n \n if (isReasoningCall) {\n requestParams.reasoning_effort = options.reasoningLevel;\n logger.debug('Using reasoning_effort: %s', options.reasoningLevel);\n }\n \n const completion = await openai.chat.completions.create(\n requestParams as 
unknown as ChatCompletionCreateParamsNonStreaming\n );\n const duration = ((Date.now() - startTime) / 1000).toFixed(1);\n\n // Log token usage with reason if provided\n const usage = completion.usage;\n const reasonSuffix = options.reason ? ` - ${options.reason}` : '';\n if (usage) {\n logger.info('%s (%ss, %d→%d tokens)%s', \n model, duration, usage.prompt_tokens, usage.completion_tokens, reasonSuffix);\n } else {\n logger.info('%s (%ss)%s', model, duration, reasonSuffix);\n }\n\n if (options.debug && options.debugFile) {\n await storage.writeFile(options.debugFile, JSON.stringify(completion, null, 2), 'utf8');\n logger.debug('Wrote debug file to %s', options.debugFile);\n }\n\n const response = completion.choices[0]?.message?.content?.trim();\n if (!response) {\n // Log the full completion object to help debug\n logger.error('Empty response from OpenAI. Full completion object: %j', completion);\n throw new OpenAIError('No response received from OpenAI');\n }\n\n logger.debug('Received response from OpenAI: %s', response);\n if (options.responseFormat) {\n return JSON.parse(response);\n } else {\n return response;\n }\n\n } catch (error: any) {\n logger.error('Error calling OpenAI API: %s %s', error.message, error.stack);\n throw new OpenAIError(`Failed to create completion: ${error.message}`);\n }\n}\n\nexport async function transcribeAudio(filePath: string, options: { model?: string, debug?: boolean, debugFile?: string } = {}): Promise<Transcription> {\n const logger = getLogger();\n const storage = Storage.create({ log: logger.debug });\n try {\n const apiKey = process.env.OPENAI_API_KEY;\n if (!apiKey) {\n throw new OpenAIError('OPENAI_API_KEY environment variable is not set');\n }\n\n const openai = new OpenAI({\n apiKey: apiKey,\n });\n\n const model = options.model || DEFAULT_TRANSCRIPTION_MODEL;\n const fileName = filePath.split('/').pop() || filePath;\n logger.debug('Transcribing: %s (full path: %s)', fileName, filePath);\n\n const startTime = 
Date.now();\n const audioStream = await storage.readStream(filePath);\n const transcription = await openai.audio.transcriptions.create({\n model,\n file: audioStream,\n response_format: \"json\",\n });\n \n if (!transcription) {\n throw new OpenAIError('No transcription received from OpenAI');\n }\n \n const duration = ((Date.now() - startTime) / 1000).toFixed(1);\n logger.info('%s (%ss, %d chars)', model, duration, transcription.text?.length || 0);\n\n if (options.debug && options.debugFile) {\n await storage.writeFile(options.debugFile, JSON.stringify(transcription, null, 2), 'utf8');\n logger.debug('Wrote debug file to %s', options.debugFile);\n }\n\n logger.debug('Received transcription from OpenAI: %s', transcription);\n return transcription;\n\n } catch (error: any) {\n logger.error('Error transcribing audio file: %s %s', error.message, error.stack);\n throw new OpenAIError(`Failed to transcribe audio: ${error.message}`);\n }\n}\n"],"names":["storage","Storage.create"],"mappings":";;;;;AAUO,MAAM,oBAAoB,KAAA,CAAM;AAAA,EACnC,YAAY,OAAA,EAAiB;AACzB,IAAA,KAAA,CAAM,OAAO,CAAA;AACb,IAAA,IAAA,CAAK,IAAA,GAAO,aAAA;AAAA,EAChB;AACJ;AA+EA,eAAsB,eAAA,CAAgB,QAAA,EAAkB,OAAA,GAAmE,EAAC,EAA2B;AACnJ,EAAA,MAAM,SAAS,SAAA,EAAU;AACzB,EAAA,MAAMA,YAAUC,MAAQ,CAAO,EAAE,GAAA,EAAK,MAAA,CAAO,OAAO,CAAA;AACpD,EAAA,IAAI;AACA,IAAA,MAAM,MAAA,GAAS,QAAQ,GAAA,CAAI,cAAA;AAC3B,IAAA,IAAI,CAAC,MAAA,EAAQ;AACT,MAAA,MAAM,IAAI,YAAY,gDAAgD,CAAA;AAAA,IAC1E;AAEA,IAAA,MAAM,MAAA,GAAS,IAAI,MAAA,CAAO;AAAA,MACtB;AAAA,KACH,CAAA;AAED,IAAA,MAAM,KAAA,GAAQ,QAAQ,KAAA,IAAS,2BAAA;AAC/B,IAAA,MAAM,WAAW,QAAA,CAAS,KAAA,CAAM,GAAG,CAAA,CAAE,KAAI,IAAK,QAAA;AAC9C,IAAA,MAAA,CAAO,KAAA,CAAM,kCAAA,EAAoC,QAAA,EAAU,QAAQ,CAAA;AAEnE,IAAA,MAAM,SAAA,GAAY,KAAK,GAAA,EAAI;AAC3B,IAAA,MAAM,WAAA,GAAc,MAAMD,SAAA,CAAQ,UAAA,CAAW,QAAQ,CAAA;AACrD,IAAA,MAAM,aAAA,GAAgB,MAAM,MAAA,CAAO,KAAA,CAAM,eAAe,MAAA,CAAO;AAAA,MAC3D,KAAA;AAAA,MACA,IAAA,EAAM,WAAA;AAAA,MACN,eAAA,EAAiB;AAAA,KACpB,CAAA;AAED,IAAA,IAAI,CAAC,aAAA,EAAe;AAChB,MAAA,MAAM,IAAI,YAAY,uCAAuC,CAAA;
AAAA,IACjE;AAEA,IAAA,MAAM,aAAa,IAAA,CAAK,GAAA,KAAQ,SAAA,IAAa,GAAA,EAAM,QAAQ,CAAC,CAAA;AAC5D,IAAA,MAAA,CAAO,KAAK,oBAAA,EAAsB,KAAA,EAAO,UAAU,aAAA,CAAc,IAAA,EAAM,UAAU,CAAC,CAAA;AAElF,IAAA,IAAI,OAAA,CAAQ,KAAA,IAAS,OAAA,CAAQ,SAAA,EAAW;AACpC,MAAA,MAAMA,SAAA,CAAQ,SAAA,CAAU,OAAA,CAAQ,SAAA,EAAW,IAAA,CAAK,UAAU,aAAA,EAAe,IAAA,EAAM,CAAC,CAAA,EAAG,MAAM,CAAA;AACzF,MAAA,MAAA,CAAO,KAAA,CAAM,wBAAA,EAA0B,OAAA,CAAQ,SAAS,CAAA;AAAA,IAC5D;AAEA,IAAA,MAAA,CAAO,KAAA,CAAM,0CAA0C,aAAa,CAAA;AACpE,IAAA,OAAO,aAAA;AAAA,EAEX,SAAS,KAAA,EAAY;AACjB,IAAA,MAAA,CAAO,KAAA,CAAM,sCAAA,EAAwC,KAAA,CAAM,OAAA,EAAS,MAAM,KAAK,CAAA;AAC/E,IAAA,MAAM,IAAI,WAAA,CAAY,CAAA,4BAAA,EAA+B,KAAA,CAAM,OAAO,CAAA,CAAE,CAAA;AAAA,EACxE;AACJ;;;;"}
package/dist/index55.js CHANGED
@@ -1,239 +1,39 @@
1
- import { getLogger } from './index47.js';
2
- import * as yaml from 'js-yaml';
3
- import * as fs from 'fs/promises';
4
- import * as path from 'node:path';
5
- import * as os from 'node:os';
6
-
7
- const create = (config) => {
8
- const logger = getLogger();
9
- const database = {
10
- mappings: [],
11
- tier1: [],
12
- tier2: /* @__PURE__ */ new Map(),
13
- tier3: [],
14
- collisions: /* @__PURE__ */ new Map(),
15
- commonTerms: new Set(DEFAULT_COMMON_TERMS),
16
- genericTerms: new Set(DEFAULT_GENERIC_TERMS)
17
- };
18
- const findProtokolDirectories = async () => {
19
- const homeDir = os.homedir();
20
- const primaryPath = path.join(homeDir, ".protokoll", "context");
21
- const dirs = [];
22
- try {
23
- await fs.access(primaryPath);
24
- dirs.push(primaryPath);
25
- logger.debug(`Found protokoll context at: ${primaryPath}`);
26
- } catch {
27
- logger.debug(`No protokoll context found at: ${primaryPath}`);
28
- }
29
- return dirs;
30
- };
31
- const loadProjectsFromProtokoll = async () => {
32
- logger.debug("Loading projects from protokoll context");
33
- const contextDirs = await findProtokolDirectories();
34
- if (contextDirs.length === 0) {
35
- logger.warn("No protokoll context directories found");
36
- return [];
37
- }
38
- const mappings = [];
39
- for (const contextDir of contextDirs) {
40
- const projectsDir = path.join(contextDir, "projects");
41
- try {
42
- const files = await fs.readdir(projectsDir);
43
- for (const file of files) {
44
- if (!file.endsWith(".yaml") && !file.endsWith(".yml")) continue;
45
- try {
46
- const content = await fs.readFile(path.join(projectsDir, file), "utf-8");
47
- const parsed = yaml.load(content);
48
- if (!parsed || !parsed.id || !parsed.name) {
49
- logger.debug(`Skipping invalid project file: ${file}`);
50
- continue;
51
- }
52
- if (parsed.active === false) {
53
- logger.debug(`Skipping inactive project: ${parsed.id}`);
54
- continue;
55
- }
56
- if (parsed.sounds_like && parsed.sounds_like.length > 0) {
57
- for (const soundsLike of parsed.sounds_like) {
58
- mappings.push({
59
- soundsLike: soundsLike.toLowerCase(),
60
- correctText: parsed.name,
61
- entityType: "project",
62
- entityId: parsed.id,
63
- scopedToProjects: null,
64
- // Will be determined by collision detection
65
- collisionRisk: "none",
66
- // Will be determined by collision detection
67
- tier: 1
68
- // Will be determined by collision detection
69
- });
70
- }
71
- logger.debug(`Loaded ${parsed.sounds_like.length} sounds_like entries for project: ${parsed.id}`);
72
- }
73
- } catch (error) {
74
- logger.warn(`Failed to parse project file ${file}: ${error.message}`);
75
- }
76
- }
77
- } catch (error) {
78
- logger.debug(`Could not read projects directory ${projectsDir}: ${error.message}`);
79
- }
80
- }
81
- logger.info(`Loaded ${mappings.length} sounds_like mappings from protokoll projects`);
82
- return mappings;
83
- };
84
- const detectCollisions = (mappings) => {
85
- const collisionMap = /* @__PURE__ */ new Map();
86
- for (const mapping of mappings) {
87
- const key = mapping.soundsLike.toLowerCase();
88
- if (!collisionMap.has(key)) {
89
- collisionMap.set(key, []);
90
- }
91
- collisionMap.get(key).push(mapping);
92
- }
93
- const collisions = /* @__PURE__ */ new Map();
94
- for (const [soundsLike, conflictMappings] of collisionMap) {
95
- if (conflictMappings.length > 1) {
96
- collisions.set(soundsLike, {
97
- soundsLike,
98
- mappings: conflictMappings,
99
- count: conflictMappings.length
100
- });
101
- logger.debug(`Collision detected for "${soundsLike}": ${conflictMappings.length} mappings`);
102
- }
103
- }
104
- logger.info(`Detected ${collisions.size} collisions in sounds_like mappings`);
105
- return collisions;
106
- };
107
- const classifyTier = (mapping) => {
108
- if (!mapping.soundsLike) {
109
- return 3;
110
- }
111
- const soundsLikeLower = mapping.soundsLike.toLowerCase();
112
- if (database.genericTerms.has(soundsLikeLower)) {
113
- return 3;
114
- }
115
- if (database.commonTerms.has(soundsLikeLower)) {
116
- return 2;
117
- }
118
- if (database.collisions.has(soundsLikeLower)) {
119
- return 2;
120
- }
121
- return 1;
122
- };
123
- const assignTiersAndCollisions = (mappings) => {
124
- for (const mapping of mappings) {
125
- mapping.tier = classifyTier(mapping);
126
- if (database.collisions.has(mapping.soundsLike.toLowerCase())) {
127
- mapping.collisionRisk = "high";
128
- } else if (database.commonTerms.has(mapping.soundsLike.toLowerCase())) {
129
- mapping.collisionRisk = "medium";
130
- } else if (mapping.tier === 2) {
131
- mapping.collisionRisk = "low";
132
- } else {
133
- mapping.collisionRisk = "none";
134
- }
135
- if (mapping.tier === 2 && mapping.entityType === "project") {
136
- mapping.scopedToProjects = [mapping.entityId];
137
- mapping.minConfidence = 0.6;
1
/**
 * Escape a raw string for inclusion inside a JSON string literal.
 * Handles quotes, backslashes, and the control characters that
 * RFC 8259 requires to be escaped.
 */
const escapeJSONString = function(str) {
  let out = "";
  for (const ch of str) {
    if (ch === '"') out += '\\"';
    else if (ch === "\\") out += "\\\\";
    else if (ch === "\n") out += "\\n";
    else if (ch === "\r") out += "\\r";
    else if (ch === "\t") out += "\\t";
    else if (ch === "\b") out += "\\b";
    else if (ch === "\f") out += "\\f";
    else if (ch < " ") out += "\\u" + ch.charCodeAt(0).toString(16).padStart(4, "0");
    else out += ch;
  }
  return out;
};
/**
 * Recursive implementation of JSON.stringify.
 *
 * Fixes over the previous revision:
 *  - Skipped members (function / undefined values) no longer leave empty
 *    slots behind, which produced invalid output such as '{"a":1,,"b":2}'.
 *  - String values and keys are escaped, so an embedded '"' or newline
 *    can no longer corrupt the output.
 *  - Empty-array detection uses .length instead of obj[0] === undefined,
 *    which mis-classified arrays whose first element is undefined.
 *  - Array entries that cannot be serialized render as "null", matching
 *    JSON.stringify.
 *
 * Returns "" for top-level values JSON.stringify would report as
 * undefined (e.g. a bare function), preserving the original contract.
 */
const stringifyJSON = function(obj) {
  // Primitives serialize directly; null must be handled before the
  // object branch because typeof null === "object".
  if (typeof obj === "number" || typeof obj === "boolean" || obj === null)
    return "" + obj;
  if (typeof obj === "string")
    return '"' + escapeJSONString(obj) + '"';
  if (Array.isArray(obj)) {
    const parts = [];
    for (const el of obj) {
      const serialized = stringifyJSON(el);
      // JSON.stringify renders non-serializable array entries as null.
      parts.push(serialized === "" ? "null" : serialized);
    }
    return "[" + parts.join(",") + "]";
  }
  if (obj instanceof Object) {
    const pairs = [];
    for (const key of Object.keys(obj)) {
      const value = obj[key];
      // Functions and undefined properties are omitted entirely,
      // exactly as JSON.stringify does — not emitted as empty slots.
      if (value instanceof Function || value === undefined) continue;
      const serialized = stringifyJSON(value);
      // Anything else that serializes to "" (e.g. symbols) is also skipped.
      if (serialized === "") continue;
      pairs.push('"' + escapeJSONString(key) + '":' + serialized);
    }
    return "{" + pairs.join(",") + "}";
  }
  // undefined, symbols, etc. — JSON.stringify would yield undefined here.
  return "";
};
215
- const DEFAULT_COMMON_TERMS = [
216
- "protocol",
217
- "observation",
218
- "composition",
219
- "gateway",
220
- "service",
221
- "system",
222
- "platform"
223
- ];
224
- const DEFAULT_GENERIC_TERMS = [
225
- "meeting",
226
- "update",
227
- "work",
228
- "project",
229
- "task",
230
- "issue",
231
- "discussion",
232
- "review",
233
- "the",
234
- "a",
235
- "an"
236
- ];
237
37
 
238
- export { create };
38
+ export { stringifyJSON };
239
39
  //# sourceMappingURL=index55.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"index55.js","sources":["../src/util/sounds-like-database.ts"],"sourcesContent":["/**\n * Sounds-Like Database\n *\n * Aggregates sounds_like mappings from multiple sources (projects, people, terms)\n * and provides efficient lookup and collision detection for entity correction.\n *\n * Part of the simple-replace optimization (Phase 1).\n */\n\nimport * as Logging from '@/logging';\nimport * as yaml from 'js-yaml';\nimport * as fs from 'fs/promises';\nimport * as path from 'node:path';\nimport * as os from 'node:os';\n\n/**\n * Represents a single sounds_like mapping entry\n */\nexport interface SoundsLikeMapping {\n /** What Whisper typically hears (e.g., \"protocol\", \"observation\") */\n soundsLike: string;\n\n /** Correct text to replace with (e.g., \"Protokoll\", \"Observasjon\") */\n correctText: string;\n\n /** Type of entity (project, person, or term) */\n entityType: 'project' | 'person' | 'term';\n\n /** Unique identifier for the entity */\n entityId: string;\n\n /** Only apply replacement in these project contexts (null = apply everywhere) */\n scopedToProjects?: string[] | null;\n\n /** Collision risk level: none (Tier 1), low/medium (Tier 2), high (Tier 3) */\n collisionRisk: 'none' | 'low' | 'medium' | 'high';\n\n /** Tier classification (1 = always safe, 2 = project-scoped, 3 = ambiguous) */\n tier: 1 | 2 | 3;\n\n /** Minimum confidence required for Tier 2 replacements */\n minConfidence?: number;\n}\n\n/**\n * Collision information for a sounds_like value\n */\nexport interface Collision {\n /** The sounds_like value that has collisions */\n soundsLike: string;\n\n /** All mappings that share this sounds_like value */\n mappings: SoundsLikeMapping[];\n\n /** Number of conflicting mappings */\n count: number;\n}\n\n/**\n * Database of sounds_like mappings with collision detection\n */\nexport interface SoundsLikeDatabase {\n /** All loaded mappings */\n mappings: SoundsLikeMapping[];\n\n /** Tier 1 mappings (always safe to 
apply) */\n tier1: SoundsLikeMapping[];\n\n /** Tier 2 mappings (require project-scoping) */\n tier2: Map<string, SoundsLikeMapping[]>; // Keyed by project ID\n\n /** Tier 3 mappings (too ambiguous, skip) */\n tier3: SoundsLikeMapping[];\n\n /** Detected collisions */\n collisions: Map<string, Collision>;\n\n /** Common terms that should not be replaced */\n commonTerms: Set<string>;\n\n /** Generic terms to always skip (Tier 3) */\n genericTerms: Set<string>;\n}\n\n/**\n * Configuration for the sounds-like database\n */\nexport interface DatabaseConfig {\n /** Protokoll context directories to load from */\n protokollContextPaths?: string[];\n\n /** Confidence threshold for Tier 2 replacements */\n tier2Confidence?: number;\n\n /** Enable collision detection */\n detectCollisions?: boolean;\n\n /** Custom common terms list */\n commonTerms?: string[];\n\n /** Custom generic terms list */\n genericTerms?: string[];\n}\n\n/**\n * Instance interface for the sounds-like database\n */\nexport interface Instance {\n /** Load all sounds_like mappings from sources */\n load(): Promise<SoundsLikeDatabase>;\n\n /** Get all Tier 1 (always safe) mappings */\n getTier1Mappings(): SoundsLikeMapping[];\n\n /** Get Tier 2 (project-scoped) mappings for a specific project */\n getTier2MappingsForProject(projectId: string): SoundsLikeMapping[];\n\n /** Check if a sounds_like value has collisions */\n hasCollision(soundsLike: string): boolean;\n\n /** Get collision info for a sounds_like value */\n getCollision(soundsLike: string): Collision | undefined;\n\n /** Get all collisions */\n getAllCollisions(): Collision[];\n\n /** Classify a mapping into a tier based on collision risk */\n classifyTier(mapping: Partial<SoundsLikeMapping>): 1 | 2 | 3;\n}\n\ninterface ProtokolProject {\n id: string;\n name: string;\n type: 'project';\n sounds_like?: string[];\n classification?: {\n context_type?: 'work' | 'personal' | 'mixed';\n };\n active?: boolean;\n}\n\n/**\n * Create a sounds-like 
database instance\n */\nexport const create = (config?: DatabaseConfig): Instance => {\n const logger = Logging.getLogger();\n\n const database: SoundsLikeDatabase = {\n mappings: [],\n tier1: [],\n tier2: new Map(),\n tier3: [],\n collisions: new Map(),\n commonTerms: new Set(config?.commonTerms ?? DEFAULT_COMMON_TERMS),\n genericTerms: new Set(config?.genericTerms ?? DEFAULT_GENERIC_TERMS),\n };\n\n /**\n * Find protokoll context directories\n */\n const findProtokolDirectories = async (): Promise<string[]> => {\n if (config?.protokollContextPaths) {\n return config.protokollContextPaths;\n }\n\n const homeDir = os.homedir();\n const primaryPath = path.join(homeDir, '.protokoll', 'context');\n\n const dirs: string[] = [];\n\n // Check primary protokoll directory\n try {\n await fs.access(primaryPath);\n dirs.push(primaryPath);\n logger.debug(`Found protokoll context at: ${primaryPath}`);\n } catch {\n logger.debug(`No protokoll context found at: ${primaryPath}`);\n }\n\n return dirs;\n };\n\n /**\n * Load projects from protokoll context\n */\n const loadProjectsFromProtokoll = async (): Promise<SoundsLikeMapping[]> => {\n logger.debug('Loading projects from protokoll context');\n\n const contextDirs = await findProtokolDirectories();\n\n if (contextDirs.length === 0) {\n logger.warn('No protokoll context directories found');\n return [];\n }\n\n const mappings: SoundsLikeMapping[] = [];\n\n for (const contextDir of contextDirs) {\n const projectsDir = path.join(contextDir, 'projects');\n\n try {\n const files = await fs.readdir(projectsDir);\n\n for (const file of files) {\n if (!file.endsWith('.yaml') && !file.endsWith('.yml')) continue;\n\n try {\n const content = await fs.readFile(path.join(projectsDir, file), 'utf-8');\n const parsed = yaml.load(content) as Partial<ProtokolProject>;\n\n if (!parsed || !parsed.id || !parsed.name) {\n logger.debug(`Skipping invalid project file: ${file}`);\n continue;\n }\n\n // Skip inactive projects\n if (parsed.active === 
false) {\n logger.debug(`Skipping inactive project: ${parsed.id}`);\n continue;\n }\n\n // Process sounds_like entries\n if (parsed.sounds_like && parsed.sounds_like.length > 0) {\n for (const soundsLike of parsed.sounds_like) {\n mappings.push({\n soundsLike: soundsLike.toLowerCase(),\n correctText: parsed.name,\n entityType: 'project',\n entityId: parsed.id,\n scopedToProjects: null, // Will be determined by collision detection\n collisionRisk: 'none', // Will be determined by collision detection\n tier: 1, // Will be determined by collision detection\n });\n }\n logger.debug(`Loaded ${parsed.sounds_like.length} sounds_like entries for project: ${parsed.id}`);\n }\n } catch (error: any) {\n logger.warn(`Failed to parse project file ${file}: ${error.message}`);\n }\n }\n } catch (error: any) {\n logger.debug(`Could not read projects directory ${projectsDir}: ${error.message}`);\n }\n }\n\n logger.info(`Loaded ${mappings.length} sounds_like mappings from protokoll projects`);\n return mappings;\n };\n\n /**\n * Detect collisions in mappings\n */\n const detectCollisions = (mappings: SoundsLikeMapping[]): Map<string, Collision> => {\n const collisionMap = new Map<string, SoundsLikeMapping[]>();\n\n // Group by sounds_like value (case-insensitive)\n for (const mapping of mappings) {\n const key = mapping.soundsLike.toLowerCase();\n if (!collisionMap.has(key)) {\n collisionMap.set(key, []);\n }\n collisionMap.get(key)!.push(mapping);\n }\n\n // Identify actual collisions (multiple mappings for same sounds_like)\n const collisions = new Map<string, Collision>();\n for (const [soundsLike, conflictMappings] of collisionMap) {\n if (conflictMappings.length > 1) {\n collisions.set(soundsLike, {\n soundsLike,\n mappings: conflictMappings,\n count: conflictMappings.length,\n });\n logger.debug(`Collision detected for \"${soundsLike}\": ${conflictMappings.length} mappings`);\n }\n }\n\n logger.info(`Detected ${collisions.size} collisions in sounds_like mappings`);\n return 
collisions;\n };\n\n /**\n * Classify a mapping into a tier based on collision risk\n */\n const classifyTier = (mapping: Partial<SoundsLikeMapping>): 1 | 2 | 3 => {\n if (!mapping.soundsLike) {\n return 3; // Invalid, treat as ambiguous\n }\n\n const soundsLikeLower = mapping.soundsLike.toLowerCase();\n\n // Tier 3: Generic terms (always skip)\n if (database.genericTerms.has(soundsLikeLower)) {\n return 3;\n }\n\n // Tier 2: Common terms (require project-scoping)\n if (database.commonTerms.has(soundsLikeLower)) {\n return 2;\n }\n\n // Tier 2: Has collision with other mappings\n if (database.collisions.has(soundsLikeLower)) {\n return 2;\n }\n\n // Tier 1: Unique, no collisions, not a common term\n return 1;\n };\n\n /**\n * Assign tiers and collision info to all mappings\n */\n const assignTiersAndCollisions = (mappings: SoundsLikeMapping[]): void => {\n for (const mapping of mappings) {\n // Classify tier\n mapping.tier = classifyTier(mapping);\n\n // Determine collision risk\n if (database.collisions.has(mapping.soundsLike.toLowerCase())) {\n mapping.collisionRisk = 'high';\n } else if (database.commonTerms.has(mapping.soundsLike.toLowerCase())) {\n mapping.collisionRisk = 'medium';\n } else if (mapping.tier === 2) {\n mapping.collisionRisk = 'low';\n } else {\n mapping.collisionRisk = 'none';\n }\n\n // Scope to projects for Tier 2\n if (mapping.tier === 2 && mapping.entityType === 'project') {\n mapping.scopedToProjects = [mapping.entityId];\n mapping.minConfidence = config?.tier2Confidence ?? 
0.6;\n }\n\n logger.debug(\n `Classified \"${mapping.soundsLike}\" → \"${mapping.correctText}\" ` +\n `(${mapping.entityType}:${mapping.entityId}) as Tier ${mapping.tier} ` +\n `(risk: ${mapping.collisionRisk})`\n );\n }\n };\n\n /**\n * Organize mappings by tier\n */\n const organizeMappingsByTier = (mappings: SoundsLikeMapping[]): void => {\n database.tier1 = [];\n database.tier2 = new Map();\n database.tier3 = [];\n\n for (const mapping of mappings) {\n if (mapping.tier === 1) {\n database.tier1.push(mapping);\n } else if (mapping.tier === 2) {\n // Organize Tier 2 by project ID for efficient lookup\n if (mapping.entityType === 'project') {\n if (!database.tier2.has(mapping.entityId)) {\n database.tier2.set(mapping.entityId, []);\n }\n database.tier2.get(mapping.entityId)!.push(mapping);\n } else {\n // For non-project entities in Tier 2, add to a generic bucket\n if (!database.tier2.has('_generic')) {\n database.tier2.set('_generic', []);\n }\n database.tier2.get('_generic')!.push(mapping);\n }\n } else {\n database.tier3.push(mapping);\n }\n }\n\n logger.info(\n `Organized mappings: Tier 1=${database.tier1.length}, ` +\n `Tier 2=${Array.from(database.tier2.values()).reduce((sum, arr) => sum + arr.length, 0)}, ` +\n `Tier 3=${database.tier3.length}`\n );\n };\n\n /**\n * Load all sounds_like mappings\n */\n const load = async (): Promise<SoundsLikeDatabase> => {\n logger.info('Loading sounds_like database');\n\n // Load from all sources\n const projectMappings = await loadProjectsFromProtokoll();\n // TODO: Load from people source\n // TODO: Load from terms source\n\n const allMappings = [\n ...projectMappings,\n // ...peopleMappings,\n // ...termMappings,\n ];\n\n database.mappings = allMappings;\n\n // Detect collisions\n if (config?.detectCollisions !== false) {\n database.collisions = detectCollisions(allMappings);\n }\n\n // Assign tiers and collision info\n assignTiersAndCollisions(allMappings);\n\n // Organize by tier for efficient lookup\n 
organizeMappingsByTier(allMappings);\n\n logger.info(`Sounds_like database loaded: ${allMappings.length} total mappings`);\n\n return database;\n };\n\n /**\n * Get Tier 1 mappings (always safe)\n */\n const getTier1Mappings = (): SoundsLikeMapping[] => {\n return database.tier1;\n };\n\n /**\n * Get Tier 2 mappings for a specific project\n */\n const getTier2MappingsForProject = (projectId: string): SoundsLikeMapping[] => {\n const projectMappings = database.tier2.get(projectId) ?? [];\n const genericMappings = database.tier2.get('_generic') ?? [];\n return [...projectMappings, ...genericMappings];\n };\n\n /**\n * Check if a sounds_like value has collisions\n */\n const hasCollision = (soundsLike: string): boolean => {\n return database.collisions.has(soundsLike.toLowerCase());\n };\n\n /**\n * Get collision info\n */\n const getCollision = (soundsLike: string): Collision | undefined => {\n return database.collisions.get(soundsLike.toLowerCase());\n };\n\n /**\n * Get all collisions\n */\n const getAllCollisions = (): Collision[] => {\n return Array.from(database.collisions.values());\n };\n\n return {\n load,\n getTier1Mappings,\n getTier2MappingsForProject,\n hasCollision,\n getCollision,\n getAllCollisions,\n classifyTier,\n };\n};\n\n/**\n * Default common terms that indicate Tier 2 (project-scoped) replacements\n */\nconst DEFAULT_COMMON_TERMS = [\n 'protocol',\n 'observation',\n 'composition',\n 'gateway',\n 'service',\n 'system',\n 'platform',\n];\n\n/**\n * Default generic terms that should never be replaced (Tier 3)\n */\nconst DEFAULT_GENERIC_TERMS = [\n 'meeting',\n 'update',\n 'work',\n 'project',\n 'task',\n 'issue',\n 'discussion',\n 'review',\n 'the',\n 'a',\n 
'an',\n];\n"],"names":["Logging.getLogger"],"mappings":";;;;;;AAgJO,MAAM,MAAA,GAAS,CAAC,MAAA,KAAsC;AACzD,EAAA,MAAM,MAAA,GAASA,SAAQ,EAAU;AAEjC,EAAA,MAAM,QAAA,GAA+B;AAAA,IACjC,UAAU,EAAC;AAAA,IACX,OAAO,EAAC;AAAA,IACR,KAAA,sBAAW,GAAA,EAAI;AAAA,IACf,OAAO,EAAC;AAAA,IACR,UAAA,sBAAgB,GAAA,EAAI;AAAA,IACpB,WAAA,EAAa,IAAI,GAAA,CAA2B,oBAAoB,CAAA;AAAA,IAChE,YAAA,EAAc,IAAI,GAAA,CAA4B,qBAAqB;AAAA,GACvE;AAKA,EAAA,MAAM,0BAA0B,YAA+B;AAK3D,IAAA,MAAM,OAAA,GAAU,GAAG,OAAA,EAAQ;AAC3B,IAAA,MAAM,WAAA,GAAc,IAAA,CAAK,IAAA,CAAK,OAAA,EAAS,cAAc,SAAS,CAAA;AAE9D,IAAA,MAAM,OAAiB,EAAC;AAGxB,IAAA,IAAI;AACA,MAAA,MAAM,EAAA,CAAG,OAAO,WAAW,CAAA;AAC3B,MAAA,IAAA,CAAK,KAAK,WAAW,CAAA;AACrB,MAAA,MAAA,CAAO,KAAA,CAAM,CAAA,4BAAA,EAA+B,WAAW,CAAA,CAAE,CAAA;AAAA,IAC7D,CAAA,CAAA,MAAQ;AACJ,MAAA,MAAA,CAAO,KAAA,CAAM,CAAA,+BAAA,EAAkC,WAAW,CAAA,CAAE,CAAA;AAAA,IAChE;AAEA,IAAA,OAAO,IAAA;AAAA,EACX,CAAA;AAKA,EAAA,MAAM,4BAA4B,YAA0C;AACxE,IAAA,MAAA,CAAO,MAAM,yCAAyC,CAAA;AAEtD,IAAA,MAAM,WAAA,GAAc,MAAM,uBAAA,EAAwB;AAElD,IAAA,IAAI,WAAA,CAAY,WAAW,CAAA,EAAG;AAC1B,MAAA,MAAA,CAAO,KAAK,wCAAwC,CAAA;AACpD,MAAA,OAAO,EAAC;AAAA,IACZ;AAEA,IAAA,MAAM,WAAgC,EAAC;AAEvC,IAAA,KAAA,MAAW,cAAc,WAAA,EAAa;AAClC,MAAA,MAAM,WAAA,GAAc,IAAA,CAAK,IAAA,CAAK,UAAA,EAAY,UAAU,CAAA;AAEpD,MAAA,IAAI;AACA,QAAA,MAAM,KAAA,GAAQ,MAAM,EAAA,CAAG,OAAA,CAAQ,WAAW,CAAA;AAE1C,QAAA,KAAA,MAAW,QAAQ,KAAA,EAAO;AACtB,UAAA,IAAI,CAAC,KAAK,QAAA,CAAS,OAAO,KAAK,CAAC,IAAA,CAAK,QAAA,CAAS,MAAM,CAAA,EAAG;AAEvD,UAAA,IAAI;AACA,YAAA,MAAM,OAAA,GAAU,MAAM,EAAA,CAAG,QAAA,CAAS,KAAK,IAAA,CAAK,WAAA,EAAa,IAAI,CAAA,EAAG,OAAO,CAAA;AACvE,YAAA,MAAM,MAAA,GAAS,IAAA,CAAK,IAAA,CAAK,OAAO,CAAA;AAEhC,YAAA,IAAI,CAAC,MAAA,IAAU,CAAC,OAAO,EAAA,IAAM,CAAC,OAAO,IAAA,EAAM;AACvC,cAAA,MAAA,CAAO,KAAA,CAAM,CAAA,+BAAA,EAAkC,IAAI,CAAA,CAAE,CAAA;AACrD,cAAA;AAAA,YACJ;AAGA,YAAA,IAAI,MAAA,CAAO,WAAW,KAAA,EAAO;AACzB,cAAA,MAAA,CAAO,KAAA,CAAM,CAAA,2BAAA,EAA8B,MAAA,CAAO,EAAE,CAAA,CAAE,CAAA;AACtD,cAAA;AAAA,YACJ;AAGA,YAAA,IAAI,MAAA,CAAO,WAAA,IAAe,MAAA,CAAO,WAAA,CAAY,SAAS,CAAA,EAAG;AACrD,cAAA,KAAA,MAAW,UAAA,IAAc,OAAO,WAAA,EAAa;AACzC,gBAAA
,QAAA,CAAS,IAAA,CAAK;AAAA,kBACV,UAAA,EAAY,WAAW,WAAA,EAAY;AAAA,kBACnC,aAAa,MAAA,CAAO,IAAA;AAAA,kBACpB,UAAA,EAAY,SAAA;AAAA,kBACZ,UAAU,MAAA,CAAO,EAAA;AAAA,kBACjB,gBAAA,EAAkB,IAAA;AAAA;AAAA,kBAClB,aAAA,EAAe,MAAA;AAAA;AAAA,kBACf,IAAA,EAAM;AAAA;AAAA,iBACT,CAAA;AAAA,cACL;AACA,cAAA,MAAA,CAAO,KAAA,CAAM,UAAU,MAAA,CAAO,WAAA,CAAY,MAAM,CAAA,kCAAA,EAAqC,MAAA,CAAO,EAAE,CAAA,CAAE,CAAA;AAAA,YACpG;AAAA,UACJ,SAAS,KAAA,EAAY;AACjB,YAAA,MAAA,CAAO,KAAK,CAAA,6BAAA,EAAgC,IAAI,CAAA,EAAA,EAAK,KAAA,CAAM,OAAO,CAAA,CAAE,CAAA;AAAA,UACxE;AAAA,QACJ;AAAA,MACJ,SAAS,KAAA,EAAY;AACjB,QAAA,MAAA,CAAO,MAAM,CAAA,kCAAA,EAAqC,WAAW,CAAA,EAAA,EAAK,KAAA,CAAM,OAAO,CAAA,CAAE,CAAA;AAAA,MACrF;AAAA,IACJ;AAEA,IAAA,MAAA,CAAO,IAAA,CAAK,CAAA,OAAA,EAAU,QAAA,CAAS,MAAM,CAAA,6CAAA,CAA+C,CAAA;AACpF,IAAA,OAAO,QAAA;AAAA,EACX,CAAA;AAKA,EAAA,MAAM,gBAAA,GAAmB,CAAC,QAAA,KAA0D;AAChF,IAAA,MAAM,YAAA,uBAAmB,GAAA,EAAiC;AAG1D,IAAA,KAAA,MAAW,WAAW,QAAA,EAAU;AAC5B,MAAA,MAAM,GAAA,GAAM,OAAA,CAAQ,UAAA,CAAW,WAAA,EAAY;AAC3C,MAAA,IAAI,CAAC,YAAA,CAAa,GAAA,CAAI,GAAG,CAAA,EAAG;AACxB,QAAA,YAAA,CAAa,GAAA,CAAI,GAAA,EAAK,EAAE,CAAA;AAAA,MAC5B;AACA,MAAA,YAAA,CAAa,GAAA,CAAI,GAAG,CAAA,CAAG,IAAA,CAAK,OAAO,CAAA;AAAA,IACvC;AAGA,IAAA,MAAM,UAAA,uBAAiB,GAAA,EAAuB;AAC9C,IAAA,KAAA,MAAW,CAAC,UAAA,EAAY,gBAAgB,CAAA,IAAK,YAAA,EAAc;AACvD,MAAA,IAAI,gBAAA,CAAiB,SAAS,CAAA,EAAG;AAC7B,QAAA,UAAA,CAAW,IAAI,UAAA,EAAY;AAAA,UACvB,UAAA;AAAA,UACA,QAAA,EAAU,gBAAA;AAAA,UACV,OAAO,gBAAA,CAAiB;AAAA,SAC3B,CAAA;AACD,QAAA,MAAA,CAAO,MAAM,CAAA,wBAAA,EAA2B,UAAU,CAAA,GAAA,EAAM,gBAAA,CAAiB,MAAM,CAAA,SAAA,CAAW,CAAA;AAAA,MAC9F;AAAA,IACJ;AAEA,IAAA,MAAA,CAAO,IAAA,CAAK,CAAA,SAAA,EAAY,UAAA,CAAW,IAAI,CAAA,mCAAA,CAAqC,CAAA;AAC5E,IAAA,OAAO,UAAA;AAAA,EACX,CAAA;AAKA,EAAA,MAAM,YAAA,GAAe,CAAC,OAAA,KAAmD;AACrE,IAAA,IAAI,CAAC,QAAQ,UAAA,EAAY;AACrB,MAAA,OAAO,CAAA;AAAA,IACX;AAEA,IAAA,MAAM,eAAA,GAAkB,OAAA,CAAQ,UAAA,CAAW,WAAA,EAAY;AAGvD,IAAA,IAAI,QAAA,CAAS,YAAA,CAAa,GAAA,CAAI,eAAe,CAAA,EAAG;AAC5C,MAAA,OAAO,CAAA;AAAA,IACX;AAGA,IAAA,IAAI,QAAA,CAAS,WAAA,CAAY,GAAA,CAAI,eAAe,CAAA,EAAG;AAC3C,MAAA,OAAO,CAAA;AAAA,IACX;AAG
A,IAAA,IAAI,QAAA,CAAS,UAAA,CAAW,GAAA,CAAI,eAAe,CAAA,EAAG;AAC1C,MAAA,OAAO,CAAA;AAAA,IACX;AAGA,IAAA,OAAO,CAAA;AAAA,EACX,CAAA;AAKA,EAAA,MAAM,wBAAA,GAA2B,CAAC,QAAA,KAAwC;AACtE,IAAA,KAAA,MAAW,WAAW,QAAA,EAAU;AAE5B,MAAA,OAAA,CAAQ,IAAA,GAAO,aAAa,OAAO,CAAA;AAGnC,MAAA,IAAI,SAAS,UAAA,CAAW,GAAA,CAAI,QAAQ,UAAA,CAAW,WAAA,EAAa,CAAA,EAAG;AAC3D,QAAA,OAAA,CAAQ,aAAA,GAAgB,MAAA;AAAA,MAC5B,CAAA,MAAA,IAAW,SAAS,WAAA,CAAY,GAAA,CAAI,QAAQ,UAAA,CAAW,WAAA,EAAa,CAAA,EAAG;AACnE,QAAA,OAAA,CAAQ,aAAA,GAAgB,QAAA;AAAA,MAC5B,CAAA,MAAA,IAAW,OAAA,CAAQ,IAAA,KAAS,CAAA,EAAG;AAC3B,QAAA,OAAA,CAAQ,aAAA,GAAgB,KAAA;AAAA,MAC5B,CAAA,MAAO;AACH,QAAA,OAAA,CAAQ,aAAA,GAAgB,MAAA;AAAA,MAC5B;AAGA,MAAA,IAAI,OAAA,CAAQ,IAAA,KAAS,CAAA,IAAK,OAAA,CAAQ,eAAe,SAAA,EAAW;AACxD,QAAA,OAAA,CAAQ,gBAAA,GAAmB,CAAC,OAAA,CAAQ,QAAQ,CAAA;AAC5C,QAAA,OAAA,CAAQ,aAAA,GAA2C,GAAA;AAAA,MACvD;AAEA,MAAA,MAAA,CAAO,KAAA;AAAA,QACH,eAAe,OAAA,CAAQ,UAAU,CAAA,KAAA,EAAQ,OAAA,CAAQ,WAAW,CAAA,GAAA,EACxD,OAAA,CAAQ,UAAU,CAAA,CAAA,EAAI,QAAQ,QAAQ,CAAA,UAAA,EAAa,QAAQ,IAAI,CAAA,QAAA,EACzD,QAAQ,aAAa,CAAA,CAAA;AAAA,OACnC;AAAA,IACJ;AAAA,EACJ,CAAA;AAKA,EAAA,MAAM,sBAAA,GAAyB,CAAC,QAAA,KAAwC;AACpE,IAAA,QAAA,CAAS,QAAQ,EAAC;AAClB,IAAA,QAAA,CAAS,KAAA,uBAAY,GAAA,EAAI;AACzB,IAAA,QAAA,CAAS,QAAQ,EAAC;AAElB,IAAA,KAAA,MAAW,WAAW,QAAA,EAAU;AAC5B,MAAA,IAAI,OAAA,CAAQ,SAAS,CAAA,EAAG;AACpB,QAAA,QAAA,CAAS,KAAA,CAAM,KAAK,OAAO,CAAA;AAAA,MAC/B,CAAA,MAAA,IAAW,OAAA,CAAQ,IAAA,KAAS,CAAA,EAAG;AAE3B,QAAA,IAAI,OAAA,CAAQ,eAAe,SAAA,EAAW;AAClC,UAAA,IAAI,CAAC,QAAA,CAAS,KAAA,CAAM,GAAA,CAAI,OAAA,CAAQ,QAAQ,CAAA,EAAG;AACvC,YAAA,QAAA,CAAS,KAAA,CAAM,GAAA,CAAI,OAAA,CAAQ,QAAA,EAAU,EAAE,CAAA;AAAA,UAC3C;AACA,UAAA,QAAA,CAAS,MAAM,GAAA,CAAI,OAAA,CAAQ,QAAQ,CAAA,CAAG,KAAK,OAAO,CAAA;AAAA,QACtD,CAAA,MAAO;AAEH,UAAA,IAAI,CAAC,QAAA,CAAS,KAAA,CAAM,GAAA,CAAI,UAAU,CAAA,EAAG;AACjC,YAAA,QAAA,CAAS,KAAA,CAAM,GAAA,CAAI,UAAA,EAAY,EAAE,CAAA;AAAA,UACrC;AACA,UAAA,QAAA,CAAS,KAAA,CAAM,GAAA,CAAI,UAAU,CAAA,CAAG,KAAK,OAAO,CAAA;AAAA,QAChD;AAAA,MACJ,CAAA,MAAO;AACH,QAAA,QAAA,CAAS,KAAA,CAAM,KAAK,OAAO,CAAA;AAAA,MAC/B;AAAA,IACJ;AAEA,IAAA,MAAA,C
AAO,IAAA;AAAA,MACH,CAAA,2BAAA,EAA8B,QAAA,CAAS,KAAA,CAAM,MAAM,CAAA,SAAA,EACzC,MAAM,IAAA,CAAK,QAAA,CAAS,KAAA,CAAM,MAAA,EAAQ,CAAA,CAAE,OAAO,CAAC,GAAA,EAAK,GAAA,KAAQ,GAAA,GAAM,GAAA,CAAI,MAAA,EAAQ,CAAC,CAAC,CAAA,SAAA,EAC7E,QAAA,CAAS,KAAA,CAAM,MAAM,CAAA;AAAA,KACnC;AAAA,EACJ,CAAA;AAKA,EAAA,MAAM,OAAO,YAAyC;AAClD,IAAA,MAAA,CAAO,KAAK,8BAA8B,CAAA;AAG1C,IAAA,MAAM,eAAA,GAAkB,MAAM,yBAAA,EAA0B;AAIxD,IAAA,MAAM,WAAA,GAAc;AAAA,MAChB,GAAG;AAAA;AAAA;AAAA,KAGP;AAEA,IAAA,QAAA,CAAS,QAAA,GAAW,WAAA;AAGpB,IAAwC;AACpC,MAAA,QAAA,CAAS,UAAA,GAAa,iBAAiB,WAAW,CAAA;AAAA,IACtD;AAGA,IAAA,wBAAA,CAAyB,WAAW,CAAA;AAGpC,IAAA,sBAAA,CAAuB,WAAW,CAAA;AAElC,IAAA,MAAA,CAAO,IAAA,CAAK,CAAA,6BAAA,EAAgC,WAAA,CAAY,MAAM,CAAA,eAAA,CAAiB,CAAA;AAE/E,IAAA,OAAO,QAAA;AAAA,EACX,CAAA;AAKA,EAAA,MAAM,mBAAmB,MAA2B;AAChD,IAAA,OAAO,QAAA,CAAS,KAAA;AAAA,EACpB,CAAA;AAKA,EAAA,MAAM,0BAAA,GAA6B,CAAC,SAAA,KAA2C;AAC3E,IAAA,MAAM,kBAAkB,QAAA,CAAS,KAAA,CAAM,GAAA,CAAI,SAAS,KAAK,EAAC;AAC1D,IAAA,MAAM,kBAAkB,QAAA,CAAS,KAAA,CAAM,GAAA,CAAI,UAAU,KAAK,EAAC;AAC3D,IAAA,OAAO,CAAC,GAAG,eAAA,EAAiB,GAAG,eAAe,CAAA;AAAA,EAClD,CAAA;AAKA,EAAA,MAAM,YAAA,GAAe,CAAC,UAAA,KAAgC;AAClD,IAAA,OAAO,QAAA,CAAS,UAAA,CAAW,GAAA,CAAI,UAAA,CAAW,aAAa,CAAA;AAAA,EAC3D,CAAA;AAKA,EAAA,MAAM,YAAA,GAAe,CAAC,UAAA,KAA8C;AAChE,IAAA,OAAO,QAAA,CAAS,UAAA,CAAW,GAAA,CAAI,UAAA,CAAW,aAAa,CAAA;AAAA,EAC3D,CAAA;AAKA,EAAA,MAAM,mBAAmB,MAAmB;AACxC,IAAA,OAAO,KAAA,CAAM,IAAA,CAAK,QAAA,CAAS,UAAA,CAAW,QAAQ,CAAA;AAAA,EAClD,CAAA;AAEA,EAAA,OAAO;AAAA,IACH,IAAA;AAAA,IACA,gBAAA;AAAA,IACA,0BAAA;AAAA,IACA,YAAA;AAAA,IACA,YAAA;AAAA,IACA,gBAAA;AAAA,IACA;AAAA,GACJ;AACJ;AAKA,MAAM,oBAAA,GAAuB;AAAA,EACzB,UAAA;AAAA,EACA,aAAA;AAAA,EACA,aAAA;AAAA,EACA,SAAA;AAAA,EACA,SAAA;AAAA,EACA,QAAA;AAAA,EACA;AACJ,CAAA;AAKA,MAAM,qBAAA,GAAwB;AAAA,EAC1B,SAAA;AAAA,EACA,QAAA;AAAA,EACA,MAAA;AAAA,EACA,SAAA;AAAA,EACA,MAAA;AAAA,EACA,OAAA;AAAA,EACA,YAAA;AAAA,EACA,QAAA;AAAA,EACA,KAAA;AAAA,EACA,GAAA;AAAA,EACA;AACJ,CAAA;;;;"}
1
+ {"version":3,"file":"index55.js","sources":["../src/util/general.ts"],"sourcesContent":["// Utility function for deep merging two objects.\nexport function deepMerge(target: any, source: any): any {\n for (const key in source) {\n if (Object.prototype.hasOwnProperty.call(source, key)) {\n // Block prototype-polluting keys\n if (key === '__proto__' || key === 'constructor') {\n continue;\n }\n if (source[key] && typeof source[key] === 'object' && !Array.isArray(source[key])) {\n if (!target[key]) {\n target[key] = {};\n }\n deepMerge(target[key], source[key]);\n } else {\n target[key] = source[key];\n }\n }\n }\n return target;\n}\n\n//Recursive implementation of jSON.stringify;\nexport const stringifyJSON = function (obj: any): string {\n\n const arrOfKeyVals: string[] = [];\n const arrVals: string[] = [];\n let objKeys: string[] = [];\n\n /*********CHECK FOR PRIMITIVE TYPES**********/\n if (typeof obj === 'number' || typeof obj === 'boolean' || obj === null)\n return '' + obj;\n else if (typeof obj === 'string')\n return '\"' + obj + '\"';\n\n /*********CHECK FOR ARRAY**********/\n else if (Array.isArray(obj)) {\n //check for empty array\n if (obj[0] === undefined)\n return '[]';\n else {\n obj.forEach(function (el) {\n arrVals.push(stringifyJSON(el));\n });\n return '[' + arrVals + ']';\n }\n }\n /*********CHECK FOR OBJECT**********/\n else if (obj instanceof Object) {\n //get object keys\n objKeys = Object.keys(obj);\n //set key output;\n objKeys.forEach(function (key) {\n const keyOut = '\"' + key + '\":';\n const keyValOut = obj[key];\n //skip functions and undefined properties\n if (keyValOut instanceof Function || keyValOut === undefined)\n arrOfKeyVals.push('');\n else if (typeof keyValOut === 'string')\n arrOfKeyVals.push(keyOut + '\"' + keyValOut + '\"');\n else if (typeof keyValOut === 'boolean' || typeof keyValOut === 'number' || keyValOut === null)\n arrOfKeyVals.push(keyOut + keyValOut);\n //check for nested objects, call recursively until no more 
objects\n else if (keyValOut instanceof Object) {\n arrOfKeyVals.push(keyOut + stringifyJSON(keyValOut));\n }\n });\n return '{' + arrOfKeyVals + '}';\n }\n return '';\n};"],"names":[],"mappings":"AAsBO,MAAM,aAAA,GAAgB,SAAU,GAAA,EAAkB;AAErD,EAAA,MAAM,eAAyB,EAAC;AAChC,EAAA,MAAM,UAAoB,EAAC;AAC3B,EAAA,IAAI,UAAoB,EAAC;AAGzB,EAAA,IAAI,OAAO,GAAA,KAAQ,QAAA,IAAY,OAAO,GAAA,KAAQ,aAAa,GAAA,KAAQ,IAAA;AAC/D,IAAA,OAAO,EAAA,GAAK,GAAA;AAAA,OAAA,IACP,OAAO,GAAA,KAAQ,QAAA;AACpB,IAAA,OAAO,MAAM,GAAA,GAAM,GAAA;AAAA,OAAA,IAGd,KAAA,CAAM,OAAA,CAAQ,GAAG,CAAA,EAAG;AAEzB,IAAA,IAAI,GAAA,CAAI,CAAC,CAAA,KAAM,MAAA;AACX,MAAA,OAAO,IAAA;AAAA,SACN;AACD,MAAA,GAAA,CAAI,OAAA,CAAQ,SAAU,EAAA,EAAI;AACtB,QAAA,OAAA,CAAQ,IAAA,CAAK,aAAA,CAAc,EAAE,CAAC,CAAA;AAAA,MAClC,CAAC,CAAA;AACD,MAAA,OAAO,MAAM,OAAA,GAAU,GAAA;AAAA,IAC3B;AAAA,EACJ,CAAA,MAAA,IAES,eAAe,MAAA,EAAQ;AAE5B,IAAA,OAAA,GAAU,MAAA,CAAO,KAAK,GAAG,CAAA;AAEzB,IAAA,OAAA,CAAQ,OAAA,CAAQ,SAAU,GAAA,EAAK;AAC3B,MAAA,MAAM,MAAA,GAAS,MAAM,GAAA,GAAM,IAAA;AAC3B,MAAA,MAAM,SAAA,GAAY,IAAI,GAAG,CAAA;AAEzB,MAAA,IAAI,SAAA,YAAqB,YAAY,SAAA,KAAc,MAAA;AAC/C,QAAA,YAAA,CAAa,KAAK,EAAE,CAAA;AAAA,WAAA,IACf,OAAO,SAAA,KAAc,QAAA;AAC1B,QAAA,YAAA,CAAa,IAAA,CAAK,MAAA,GAAS,GAAA,GAAM,SAAA,GAAY,GAAG,CAAA;AAAA,WAAA,IAC3C,OAAO,SAAA,KAAc,SAAA,IAAa,OAAO,SAAA,KAAc,YAAY,SAAA,KAAc,IAAA;AACtF,QAAA,YAAA,CAAa,IAAA,CAAK,SAAS,SAAS,CAAA;AAAA,WAAA,IAE/B,qBAAqB,MAAA,EAAQ;AAClC,QAAA,YAAA,CAAa,IAAA,CAAK,MAAA,GAAS,aAAA,CAAc,SAAS,CAAC,CAAA;AAAA,MACvD;AAAA,IACJ,CAAC,CAAA;AACD,IAAA,OAAO,MAAM,YAAA,GAAe,GAAA;AAAA,EAChC;AACA,EAAA,OAAO,EAAA;AACX;;;;"}