@cellaware/utils 8.11.19 → 8.11.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. package/dist/azure/cosmos.d.ts +112 -0
  2. package/dist/azure/cosmos.js +305 -0
  3. package/dist/azure/email.d.ts +3 -0
  4. package/dist/azure/email.js +20 -0
  5. package/dist/azure/function.d.ts +14 -0
  6. package/dist/azure/function.js +124 -0
  7. package/dist/azure/slot.d.ts +1 -0
  8. package/dist/azure/slot.js +4 -0
  9. package/dist/azure/storage.d.ts +14 -0
  10. package/dist/azure/storage.js +81 -0
  11. package/dist/chatwms/alert.d.ts +97 -0
  12. package/dist/chatwms/alert.js +74 -0
  13. package/dist/chatwms/azure/cosmos.d.ts +25 -0
  14. package/dist/chatwms/azure/cosmos.js +43 -0
  15. package/dist/chatwms/azure/function.d.ts +21 -0
  16. package/dist/chatwms/azure/function.js +29 -0
  17. package/dist/chatwms/azure/storage.d.ts +15 -0
  18. package/dist/chatwms/azure/storage.js +27 -0
  19. package/dist/chatwms/client.d.ts +18 -0
  20. package/dist/chatwms/client.js +48 -0
  21. package/dist/chatwms/cosmos.d.ts +24 -0
  22. package/dist/chatwms/cosmos.js +532 -0
  23. package/dist/chatwms/dashboard.d.ts +80 -0
  24. package/dist/chatwms/dashboard.js +17 -0
  25. package/dist/chatwms/datagrid.d.ts +215 -0
  26. package/dist/chatwms/datagrid.js +1454 -0
  27. package/dist/chatwms/developer.d.ts +27 -0
  28. package/dist/chatwms/developer.js +12 -0
  29. package/dist/chatwms/github/issue.d.ts +1 -0
  30. package/dist/chatwms/github/issue.js +4 -0
  31. package/dist/chatwms/instance.d.ts +16 -0
  32. package/dist/chatwms/instance.js +18 -0
  33. package/dist/chatwms/integration.d.ts +24 -0
  34. package/dist/chatwms/integration.js +19 -0
  35. package/dist/chatwms/pdf.d.ts +95 -0
  36. package/dist/chatwms/pdf.js +147 -0
  37. package/dist/chatwms/report.d.ts +126 -0
  38. package/dist/chatwms/report.js +55 -0
  39. package/dist/chatwms/response.d.ts +18 -0
  40. package/dist/chatwms/response.js +25 -0
  41. package/dist/chatwms/search.d.ts +12 -0
  42. package/dist/chatwms/search.js +9 -0
  43. package/dist/chatwms/teams.d.ts +237 -0
  44. package/dist/chatwms/teams.js +205 -0
  45. package/dist/chatwms/user.d.ts +31 -0
  46. package/dist/chatwms/user.js +42 -0
  47. package/dist/chatwms/warehouse.d.ts +3 -0
  48. package/dist/chatwms/warehouse.js +3 -0
  49. package/dist/github/issue.d.ts +1 -0
  50. package/dist/github/issue.js +23 -0
  51. package/dist/llm/chain-store.d.ts +49 -0
  52. package/dist/llm/chain-store.js +284 -0
  53. package/dist/llm/cost.d.ts +3 -0
  54. package/dist/llm/cost.js +42 -0
  55. package/dist/llm/model.d.ts +12 -0
  56. package/dist/llm/model.js +1 -0
  57. package/dist/stopwatch.d.ts +8 -0
  58. package/dist/stopwatch.js +36 -0
  59. package/dist/util.d.ts +45 -0
  60. package/dist/util.js +288 -0
  61. package/dist/version.d.ts +4 -0
  62. package/dist/version.js +12 -0
  63. package/package.json +1 -1
@@ -0,0 +1,284 @@
1
+ import fs from 'fs';
2
+ import { BaseChain } from 'langchain/chains';
3
+ import { ChatPromptTemplate } from "@langchain/core/prompts";
4
+ import { ChatOpenAI } from "@langchain/openai";
5
+ import { StringOutputParser } from "@langchain/core/output_parsers";
6
+ import { getLLMTransactionCost } from './cost.js';
7
+ const CHAIN_TIMEOUT_MS = 150_000;
8
+ const TRANSLATION_CHAIN_NAME = 'translation_chain';
9
+ /**
10
+ * `SingleActionChain` only supports single interaction with LLM, and only 1
11
+ * output key is allowed. Output key is defaulted to `answer`.
12
+ */
13
+ export class SingleActionChain extends BaseChain {
14
+ constructor(fields, name, inputKeys) {
15
+ super(fields);
16
+ Object.defineProperty(this, "_name", {
17
+ enumerable: true,
18
+ configurable: true,
19
+ writable: true,
20
+ value: void 0
21
+ });
22
+ Object.defineProperty(this, "llm", {
23
+ enumerable: true,
24
+ configurable: true,
25
+ writable: true,
26
+ value: void 0
27
+ });
28
+ Object.defineProperty(this, "_inputKeys", {
29
+ enumerable: true,
30
+ configurable: true,
31
+ writable: true,
32
+ value: void 0
33
+ });
34
+ Object.defineProperty(this, "_outputKey", {
35
+ enumerable: true,
36
+ configurable: true,
37
+ writable: true,
38
+ value: void 0
39
+ });
40
+ Object.defineProperty(this, "prompt", {
41
+ enumerable: true,
42
+ configurable: true,
43
+ writable: true,
44
+ value: void 0
45
+ });
46
+ Object.defineProperty(this, "promptTemplate", {
47
+ enumerable: true,
48
+ configurable: true,
49
+ writable: true,
50
+ value: void 0
51
+ });
52
+ if (inputKeys.length == 0) {
53
+ throw new Error(`SINGLE_ACTION_CHAIN: Chain '${name}' needs at least one input key!`);
54
+ }
55
+ this._name = name;
56
+ this.llm = fields.llm;
57
+ this._inputKeys = inputKeys;
58
+ this._outputKey = 'answer';
59
+ this.prompt = fields.prompt;
60
+ this.promptTemplate = ChatPromptTemplate.fromTemplate(this.prompt);
61
+ }
62
+ async _call(values, runManager) {
63
+ let output = {
64
+ [this._outputKey]: ''
65
+ };
66
+ const llmChain = this.promptTemplate
67
+ .pipe(this.llm)
68
+ .pipe(new StringOutputParser());
69
+ let llmInputs = {};
70
+ for (const inputKey of this._inputKeys) {
71
+ llmInputs[inputKey] = values[inputKey];
72
+ }
73
+ let callbacks = undefined;
74
+ if (!!runManager && !!runManager.getChild()) {
75
+ callbacks = runManager.getChild().handlers;
76
+ }
77
+ const llmAnswer = await llmChain.invoke(llmInputs, {
78
+ callbacks
79
+ });
80
+ output[this._outputKey] = llmAnswer;
81
+ return output;
82
+ }
83
+ _chainType() {
84
+ return this._name;
85
+ }
86
+ get inputKeys() {
87
+ return this._inputKeys;
88
+ }
89
+ get outputKeys() {
90
+ return [this._outputKey];
91
+ }
92
+ getModelName() {
93
+ return this.llm.modelName;
94
+ }
95
+ getPrompt() {
96
+ return this.prompt;
97
+ }
98
+ }
99
+ export function createSingleActionChain(name, modelName, inputKeys, prompt, temperature, verbose) {
100
+ const llm = new ChatOpenAI({
101
+ modelName, temperature: temperature ?? 0, configuration: {
102
+ organization: process.env.OPENAI_ORGANIZATION,
103
+ timeout: CHAIN_TIMEOUT_MS
104
+ }
105
+ });
106
+ return new SingleActionChain({
107
+ llm,
108
+ prompt,
109
+ verbose: verbose ?? false
110
+ }, name, inputKeys);
111
+ }
112
+ /**
113
+ * A `ChainStore` is a chain registry and calling interface. The `ChainStore`
114
+ * keeps a map of all registered chains.
115
+ *
116
+ * The following chains are built-in:
117
+ * - `translation_chain`: `translate()`
118
+ *
119
+ */
120
+ export class ChainStore {
121
+ constructor(promptsPath, defaultModelName) {
122
+ Object.defineProperty(this, "promptsPath", {
123
+ enumerable: true,
124
+ configurable: true,
125
+ writable: true,
126
+ value: void 0
127
+ });
128
+ Object.defineProperty(this, "chains", {
129
+ enumerable: true,
130
+ configurable: true,
131
+ writable: true,
132
+ value: void 0
133
+ });
134
+ this.promptsPath = promptsPath;
135
+ this.chains = new Map();
136
+ this.initBuiltinChains(defaultModelName);
137
+ }
138
+ initBuiltinChains(defaultModelName) {
139
+ // Translation Chain:
140
+ {
141
+ this.addExistingChain(createSingleActionChain(TRANSLATION_CHAIN_NAME, defaultModelName ?? 'gpt-4o-mini', ['statement', 'language'], `You are a helpful AI translator who translates an English statement to {language}.
142
+
143
+ Given an English statement, translate the English statement into {language}.
144
+
145
+ Here is the English statement that you need to translate to {language}:
146
+ {statement}
147
+
148
+ Your translation here:
149
+ `));
150
+ }
151
+ }
152
+ static getTokenUsage(chainName, modelName, tokenUsage) {
153
+ return {
154
+ chain: chainName,
155
+ model: modelName,
156
+ tokenUsage,
157
+ cost: getLLMTransactionCost(tokenUsage, modelName)
158
+ };
159
+ }
160
+ getPrompt(chainName, templateChainName) {
161
+ let prompt = {
162
+ content: '',
163
+ variables: []
164
+ };
165
+ const path = !!templateChainName ? `${this.promptsPath}/${templateChainName}.md` : `${this.promptsPath}/${chainName}.md`;
166
+ if (!fs.existsSync(path)) {
167
+ throw new Error(`CHAIN_STORE: Prompt file '${path}' not found`);
168
+ }
169
+ const content = fs.readFileSync(path, 'utf8');
170
+ if (content.length === 0) {
171
+ throw new Error(`CHAIN_STORE: Prompt file '${path}' is empty`);
172
+ }
173
+ let matches = content.matchAll(/{.*?}/gm);
174
+ let distinctMatches = new Set();
175
+ for (const match of Array.from(matches)) {
176
+ let matchStr = match[0];
177
+ // Exclude escaped curly braces.
178
+ if (!matchStr.startsWith('{{')) {
179
+ distinctMatches.add(matchStr.substring(1, matchStr.length - 1));
180
+ }
181
+ }
182
+ prompt.content = content;
183
+ prompt.variables = Array.from(distinctMatches);
184
+ return prompt;
185
+ }
186
+ /**
187
+ * - `name`: Chain name
188
+ * - `template`: Template chain name to copy prompt from
189
+ * - `options`: LLM options
190
+ * - `verbose`: OpenAI verbose parameter
191
+ */
192
+ addChain(name, template, options, verbose) {
193
+ const prompt = this.getPrompt(name, template);
194
+ let llmArgs = {
195
+ modelName: options.modelName,
196
+ temperature: options.temperature ?? 0,
197
+ configuration: {
198
+ organization: process.env.OPENAI_ORGANIZATION,
199
+ timeout: CHAIN_TIMEOUT_MS
200
+ }
201
+ };
202
+ if (!!options.openai) {
203
+ if (!!options.openai.reasoning_effort) {
204
+ llmArgs.reasoning_effort = options.openai.reasoning_effort;
205
+ }
206
+ if (!!options.openai.verbosity) {
207
+ llmArgs.verbosity = options.openai.verbosity;
208
+ }
209
+ }
210
+ const llm = new ChatOpenAI(llmArgs);
211
+ const chain = new SingleActionChain({
212
+ llm,
213
+ prompt: prompt.content,
214
+ verbose: verbose ?? false
215
+ }, name, prompt.variables);
216
+ this.chains.set(name, chain);
217
+ }
218
+ addExistingChain(chain) {
219
+ this.chains.set(chain._chainType(), chain);
220
+ }
221
+ async callChain(name, args, tokenUsages) {
222
+ let chain = this.chains.get(name);
223
+ if (!chain) {
224
+ throw new Error(`CHAIN_STORE: Chain '${name}' does not exist`);
225
+ }
226
+ let presentInputKeySet = new Set();
227
+ for (const inputKey of Object.keys(args)) {
228
+ presentInputKeySet.add(inputKey);
229
+ }
230
+ let presentInputKeys = Array.from(presentInputKeySet);
231
+ // Make sure all input keys are present.
232
+ for (const inputKey of chain.inputKeys) {
233
+ if (!presentInputKeys.includes(inputKey)) {
234
+ throw new Error(`CHAIN_STORE: Input key '${inputKey}' not present for chain '${chain._chainType()}'`);
235
+ }
236
+ }
237
+ return chain.invoke(args, {
238
+ callbacks: [
239
+ {
240
+ handleLLMEnd: async (output) => {
241
+ const usage = output?.llmOutput?.tokenUsage;
242
+ tokenUsages.push(ChainStore.getTokenUsage(chain._chainType(), chain.getModelName(), usage));
243
+ },
244
+ }
245
+ ]
246
+ });
247
+ }
248
+ async translate(statement, language, tokenUsages) {
249
+ // No need to translate if requested language is in default language.
250
+ const DEFAULT_LANGUAGE = 'English';
251
+ if (language.toLowerCase() === DEFAULT_LANGUAGE.toLowerCase()) {
252
+ return statement;
253
+ }
254
+ const translationAnswer = await this.callChain(TRANSLATION_CHAIN_NAME, { statement, language }, tokenUsages ?? []);
255
+ let translationAnswerStr = translationAnswer.answer;
256
+ /*
257
+ Need to check if AI indicator exists in the answer. It is possible that the
258
+ translation chain will include this in an answer. Pretty sure this happens
259
+ because the translation chain can see conversation history, and messages from
260
+ AI will be prefixed as such.
261
+ */
262
+ const AI_PREFIX = 'AI: ';
263
+ if (translationAnswerStr.includes(AI_PREFIX)) {
264
+ console.log(`CHAIN_STORE: Removing AI indicator from translation answer`);
265
+ translationAnswerStr = translationAnswerStr.substring(translationAnswerStr.indexOf(AI_PREFIX) + AI_PREFIX.length);
266
+ }
267
+ // Remove 'Your translation here:' if it is there.
268
+ if (translationAnswerStr.includes('Your translation here:')) {
269
+ translationAnswerStr = translationAnswerStr.substring(translationAnswerStr.indexOf('Your translation here:') + 'Your translation here:'.length);
270
+ }
271
+ return translationAnswerStr.trim();
272
+ }
273
+ getPrompts() {
274
+ let buf = '';
275
+ for (const chain of Array.from(this.chains.values())) {
276
+ buf += `================================================= =================================================
277
+ ${chain._chainType()}
278
+ ================================================= =================================================
279
+ ${chain.getPrompt()}
280
+ `;
281
+ }
282
+ return buf;
283
+ }
284
+ }
@@ -0,0 +1,3 @@
1
+ import { ModelName } from "./model.js";
2
+ export declare function getLLMCostPerToken(modelName: ModelName): any;
3
+ export declare function getLLMTransactionCost(tokenUsage: any, modelName: ModelName): number;
@@ -0,0 +1,42 @@
1
+ // https://platform.openai.com/docs/pricing
2
+ const MODEL_COSTS = {
3
+ 'gpt-5.2-pro': { inputCost: 21.00, outputCost: 168.00 },
4
+ 'gpt-5.2': { inputCost: 1.75, outputCost: 14.00 },
5
+ 'gpt-5.2-chat-latest': { inputCost: 1.75, outputCost: 14.00 },
6
+ 'gpt-5.2-codex': { inputCost: 1.75, outputCost: 14.00 },
7
+ 'gpt-5.1': { inputCost: 1.25, outputCost: 10.00 },
8
+ 'gpt-5': { inputCost: 1.25, outputCost: 10.00 },
9
+ 'gpt-5-mini': { inputCost: 0.25, outputCost: 2.00 },
10
+ 'gpt-5-nano': { inputCost: 0.05, outputCost: 0.40 },
11
+ 'gpt-4.1': { inputCost: 2.00, outputCost: 8.00 },
12
+ 'gpt-4.1-mini': { inputCost: 0.40, outputCost: 1.60 },
13
+ 'gpt-4.1-nano': { inputCost: 0.10, outputCost: 0.40 },
14
+ 'gpt-4o': { inputCost: 2.50, outputCost: 10.00 },
15
+ 'gpt-4o-mini': { inputCost: 0.15, outputCost: 0.60 },
16
+ 'o1': { inputCost: 15.00, outputCost: 60.00 },
17
+ 'o1-mini': { inputCost: 1.10, outputCost: 4.40 },
18
+ 'o3': { inputCost: 2.00, outputCost: 8.00 },
19
+ 'o3-mini': { inputCost: 1.10, outputCost: 4.40 },
20
+ 'o4-mini': { inputCost: 1.10, outputCost: 4.40 },
21
+ };
22
+ export function getLLMCostPerToken(modelName) {
23
+ const cost = MODEL_COSTS[modelName];
24
+ if (!cost) {
25
+ throw new Error(`Model '${modelName}' cost is not configured`);
26
+ }
27
+ let inputCost = cost.inputCost;
28
+ let outputCost = cost.outputCost;
29
+ // OpenAI model costs are measured in millions of tokens -- therefore we need to divide each figure by 1,000,000 before returning.
30
+ const unit = 1_000_000.0;
31
+ inputCost /= unit;
32
+ outputCost /= unit;
33
+ return { inputCost, outputCost };
34
+ }
35
+ export function getLLMTransactionCost(tokenUsage, modelName) {
36
+ let cost = 0.0;
37
+ if (!!tokenUsage) {
38
+ const { inputCost, outputCost } = getLLMCostPerToken(modelName);
39
+ cost = (tokenUsage.promptTokens * inputCost) + (tokenUsage.completionTokens * outputCost);
40
+ }
41
+ return cost;
42
+ }
@@ -0,0 +1,12 @@
1
+ export type ModelName = 'gpt-5.2-pro' | 'gpt-5.2-codex' | 'gpt-5.2-chat-latest' | 'gpt-5.2' | 'gpt-5.1' | 'gpt-5' | 'gpt-5-mini' | 'gpt-5-nano' | 'gpt-4.1' | 'gpt-4.1-mini' | 'gpt-4.1-nano' | 'gpt-4o' | 'gpt-4o-mini' | 'o1' | 'o1-mini' | 'o3' | 'o3-mini' | 'o4-mini';
2
+ export type ReasoningEffort = 'minimal' | 'low' | 'medium' | 'high';
3
+ export type Verbosity = 'low' | 'medium' | 'high';
4
+ export interface OpenAIModelOptions {
5
+ reasoning_effort?: ReasoningEffort;
6
+ verbosity?: Verbosity;
7
+ }
8
+ export interface ModelOptions {
9
+ modelName: string;
10
+ temperature?: number;
11
+ openai?: OpenAIModelOptions;
12
+ }
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,8 @@
1
+ export declare class Stopwatch {
2
+ private startTime?;
3
+ private elapsed;
4
+ start(): void;
5
+ stop(): void;
6
+ reset(): void;
7
+ getElapsedMs(): number;
8
+ }
@@ -0,0 +1,36 @@
1
+ export class Stopwatch {
2
+ constructor() {
3
+ Object.defineProperty(this, "startTime", {
4
+ enumerable: true,
5
+ configurable: true,
6
+ writable: true,
7
+ value: void 0
8
+ });
9
+ Object.defineProperty(this, "elapsed", {
10
+ enumerable: true,
11
+ configurable: true,
12
+ writable: true,
13
+ value: 0n
14
+ });
15
+ }
16
+ start() {
17
+ if (!this.startTime) {
18
+ this.startTime = process.hrtime.bigint();
19
+ }
20
+ }
21
+ stop() {
22
+ if (!!this.startTime) {
23
+ this.elapsed += process.hrtime.bigint() - this.startTime;
24
+ this.startTime = undefined;
25
+ }
26
+ }
27
+ reset() {
28
+ this.elapsed = 0n;
29
+ this.startTime = undefined;
30
+ }
31
+ getElapsedMs() {
32
+ const now = this.startTime ? process.hrtime.bigint() : 0n;
33
+ const total = this.elapsed + (this.startTime ? now - this.startTime : 0n);
34
+ return Number(total) / 1e6;
35
+ }
36
+ }
package/dist/util.d.ts ADDED
@@ -0,0 +1,45 @@
1
+ export interface KeyValuePair {
2
+ key: string;
3
+ value: any;
4
+ }
5
+ export declare function sleep(ms: number): Promise<any>;
6
+ export declare function reverse(str: string): string;
7
+ export declare function base64Encode(str: string): string;
8
+ export declare function base64Decode(str: string): string;
9
+ export declare function isArrayLike(obj: any): boolean;
10
+ /**
11
+ * Default `idx` is 100.
12
+ */
13
+ export declare function truncateValue(value: any, idx?: number): any;
14
+ /**
15
+ * Default `idx` is 100.
16
+ */
17
+ export declare function truncateValuePreserveNewLines(value: string, idx?: number): string;
18
+ export declare function initDate(timeZone?: string): Date;
19
+ export declare function convertDateTimeZone(date: Date, timeZone: string): Date;
20
+ export declare function isDaylightSavingTime(timeZone?: string): boolean;
21
+ export declare function isLeapYear(timeZone?: string): boolean;
22
+ export declare function getDaysInMonth(timeZone?: string): number;
23
+ export declare function getCurrentDayInMonth(timeZone?: string): number;
24
+ export declare function getDaysInYear(timeZone?: string): 366 | 365;
25
+ export declare function getCurrentMonth(timeZone?: string): number;
26
+ export declare function getCurrentYear(timeZone?: string): number;
27
+ export declare function isDateString(value: any): boolean;
28
+ /**
29
+ * NOTE: `stopIdx` represents last character index of word
30
+ * ex: ABC -> `startIdx`: 0 `stopIdx`: 2
31
+ */
32
+ export interface QueryRegexMatch {
33
+ word: string;
34
+ startIdx: number;
35
+ stopIdx: number;
36
+ }
37
+ /**
38
+ * Will only return matches that **are not** contained in single quotes,
39
+ * double quotes, single line, or multiline comments.
40
+ */
41
+ export declare function getQueryMatches(query: string, regex: RegExp, specificMatchGroup?: number): QueryRegexMatch[];
42
+ export declare function removeQueryMatches(query: string, matches: QueryRegexMatch[]): string;
43
+ export declare function replaceQueryMatches(query: string, matches: QueryRegexMatch[], replacement: string, useMatchAsSuffix?: boolean): string;
44
+ export declare function removeMarkdownIndicators(input: string): string;
45
+ export declare function removePrefixIndicators(input: string, prefixes: string[]): string;