@discomedia/utils 1.0.9 → 1.0.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index-frontend.cjs +54 -7
- package/dist/index-frontend.cjs.map +1 -1
- package/dist/index-frontend.mjs +54 -7
- package/dist/index-frontend.mjs.map +1 -1
- package/dist/index.cjs +54 -7
- package/dist/index.cjs.map +1 -1
- package/dist/index.mjs +54 -7
- package/dist/index.mjs.map +1 -1
- package/dist/package.json +1 -1
- package/dist/test.js +7270 -6168
- package/dist/test.js.map +1 -1
- package/dist/types/llm-openai.d.ts +18 -1
- package/dist/types/llm-openai.d.ts.map +1 -1
- package/dist/types/test.d.ts +1 -1
- package/dist/types/test.d.ts.map +1 -1
- package/dist/types-frontend/llm-openai.d.ts +18 -1
- package/dist/types-frontend/llm-openai.d.ts.map +1 -1
- package/dist/types-frontend/test.d.ts +1 -1
- package/dist/types-frontend/test.d.ts.map +1 -1
- package/package.json +1 -1
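
The substantive change in 1.0.10, visible in the declaration diffs below, is a new ContextMessage interface and an optional context option on makeLLMCall for passing prior conversation turns. A minimal sketch of how that might be used, assuming makeLLMCall is re-exported from the package root and the key is supplied through the declared apiKey option (the import path and the environment variable are assumptions, not shown in this diff):

import { makeLLMCall } from '@discomedia/utils'; // assumed import path; this diff only shows the compiled .d.ts files

async function followUp() {
  // The new `context` option carries earlier turns so the model can resolve the follow-up question.
  const response = await makeLLMCall('What did I ask about earlier?', {
    apiKey: process.env.OPENAI_API_KEY, // `apiKey?: string` per the declaration diff
    context: [
      { role: 'user', content: 'What is the capital of France?' },
      { role: 'assistant', content: 'The capital of France is Paris.' },
    ],
  });
  return response;
}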
package/dist/types/llm-openai.d.ts
CHANGED
@@ -2,6 +2,13 @@ import { Tool } from 'openai/resources/responses/responses';
 import { LLMResponse, LLMOptions, LLMModel } from './types';
 import { ResponseCreateParamsNonStreaming } from 'openai/resources/responses/responses';
 export declare const DEFAULT_OPTIONS: LLMOptions;
+/**
+ * Context message for conversation history
+ */
+export interface ContextMessage {
+    role: 'user' | 'assistant' | 'system' | 'developer';
+    content: string;
+}
 /**
  * Checks if the given model supports the temperature parameter. Reasoning models (o1*, o3*, o4*) do not support temperature.
  * @param model The model to check.
@@ -48,8 +55,17 @@ export declare const makeResponsesAPICall: <T = any>(input: string | ResponseCre
  * Makes a call to the OpenAI Responses API for advanced use cases with built-in tools.
  *
  * @param input The text prompt to send to the model (e.g., "What's in this image?")
- * @param options The options for the Responses API call, including optional image data.
+ * @param options The options for the Responses API call, including optional image data and context.
  * @return A promise that resolves to the response from the Responses API.
+ *
+ * @example
+ * // With conversation context
+ * const response = await makeLLMCall("What did I ask about earlier?", {
+ *   context: [
+ *     { role: 'user', content: 'What is the capital of France?' },
+ *     { role: 'assistant', content: 'The capital of France is Paris.' }
+ *   ]
+ * });
  */
 export declare function makeLLMCall<T = any>(input: string, options?: {
     apiKey?: string;
@@ -60,5 +76,6 @@ export declare function makeLLMCall<T = any>(input: string, options?: {
     useWebSearch?: boolean;
     imageBase64?: string;
     imageDetail?: 'low' | 'high' | 'auto';
+    context?: ContextMessage[];
 }): Promise<LLMResponse<T>>;
 //# sourceMappingURL=llm-openai.d.ts.map
package/dist/types/llm-openai.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"llm-openai.d.ts","sourceRoot":"","sources":["../../src/llm-openai.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,IAAI,
+{"version":3,"file":"llm-openai.d.ts","sourceRoot":"","sources":["../../src/llm-openai.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,IAAI,EAAoE,MAAM,sCAAsC,CAAC;AAK9H,OAAO,EAAE,WAAW,EAAE,UAAU,EAAE,QAAQ,EAAwB,MAAM,SAAS,CAAC;AAClF,OAAO,EACL,gCAAgC,EAOjC,MAAM,sCAAsC,CAAC;AAE9C,eAAO,MAAM,eAAe,EAAE,UAG7B,CAAC;AAEF;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,MAAM,GAAG,WAAW,GAAG,QAAQ,GAAG,WAAW,CAAC;IACpD,OAAO,EAAE,MAAM,CAAC;CACjB;AAkBD;;;;GAIG;AACH,wBAAgB,mBAAmB,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAI1D;AAED;;;;GAIG;AACH,wBAAgB,gBAAgB,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAGvD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;GAyBG;AACH,eAAO,MAAM,oBAAoB,GAAU,CAAC,GAAG,GAAG,EAChD,OAAO,MAAM,GAAG,gCAAgC,CAAC,OAAO,CAAC,EACzD,UAAS,IAAI,CAAC,gCAAgC,EAAE,OAAO,GAAG,OAAO,CAAC,GAAG;IACnE,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE,MAAM,CAAC;CACX,KACL,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,CA0HxB,CAAC;AAGF;;;;;;;;;;;;;;;GAeG;AACH,wBAAsB,WAAW,CAAC,CAAC,GAAG,GAAG,EACvC,KAAK,EAAE,MAAM,EACb,OAAO,GAAE;IACP,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE,QAAQ,CAAC;IACjB,cAAc,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;IACjC,KAAK,CAAC,EAAE,IAAI,EAAE,CAAC;IACf,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,KAAK,GAAG,MAAM,GAAG,MAAM,CAAC;IACtC,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC;CACvB,GACL,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAsHzB"}
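
Because ContextMessage is just a role/content pair (with 'user' | 'assistant' | 'system' | 'developer' roles), callers can keep a running history array and pass it on each call. A sketch under the assumption that the interface is re-exported from the package root:

import type { ContextMessage } from '@discomedia/utils'; // assumed re-export; the interface is declared in llm-openai.d.ts above

// Running conversation history; a 'developer' (or 'system') message can seed instructions.
const history: ContextMessage[] = [
  { role: 'developer', content: 'Answer concisely.' },
  { role: 'user', content: 'What is the capital of France?' },
  { role: 'assistant', content: 'The capital of France is Paris.' },
];

// Append the next user turn, then pass `history` as the `context` option of makeLLMCall.
history.push({ role: 'user', content: 'What did I ask about earlier?' });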
package/dist/types/test.d.ts
CHANGED
@@ -1,2 +1,2 @@
-
+import 'dotenv/config';
 //# sourceMappingURL=test.d.ts.map
package/dist/types/test.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"test.d.ts","sourceRoot":"","sources":["../../src/test.ts"],"names":[],"mappings":""}
+{"version":3,"file":"test.d.ts","sourceRoot":"","sources":["../../src/test.ts"],"names":[],"mappings":"AAEA,OAAO,eAAe,CAAC"}
package/dist/types-frontend/llm-openai.d.ts
CHANGED
@@ -2,6 +2,13 @@ import { Tool } from 'openai/resources/responses/responses';
 import { LLMResponse, LLMOptions, LLMModel } from './types';
 import { ResponseCreateParamsNonStreaming } from 'openai/resources/responses/responses';
 export declare const DEFAULT_OPTIONS: LLMOptions;
+/**
+ * Context message for conversation history
+ */
+export interface ContextMessage {
+    role: 'user' | 'assistant' | 'system' | 'developer';
+    content: string;
+}
 /**
  * Checks if the given model supports the temperature parameter. Reasoning models (o1*, o3*, o4*) do not support temperature.
  * @param model The model to check.
@@ -48,8 +55,17 @@ export declare const makeResponsesAPICall: <T = any>(input: string | ResponseCre
  * Makes a call to the OpenAI Responses API for advanced use cases with built-in tools.
  *
  * @param input The text prompt to send to the model (e.g., "What's in this image?")
- * @param options The options for the Responses API call, including optional image data.
+ * @param options The options for the Responses API call, including optional image data and context.
  * @return A promise that resolves to the response from the Responses API.
+ *
+ * @example
+ * // With conversation context
+ * const response = await makeLLMCall("What did I ask about earlier?", {
+ *   context: [
+ *     { role: 'user', content: 'What is the capital of France?' },
+ *     { role: 'assistant', content: 'The capital of France is Paris.' }
+ *   ]
+ * });
  */
 export declare function makeLLMCall<T = any>(input: string, options?: {
     apiKey?: string;
@@ -60,5 +76,6 @@ export declare function makeLLMCall<T = any>(input: string, options?: {
     useWebSearch?: boolean;
     imageBase64?: string;
     imageDetail?: 'low' | 'high' | 'auto';
+    context?: ContextMessage[];
 }): Promise<LLMResponse<T>>;
 //# sourceMappingURL=llm-openai.d.ts.map
package/dist/types-frontend/llm-openai.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"llm-openai.d.ts","sourceRoot":"","sources":["../../src/llm-openai.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,IAAI,
+{"version":3,"file":"llm-openai.d.ts","sourceRoot":"","sources":["../../src/llm-openai.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,IAAI,EAAoE,MAAM,sCAAsC,CAAC;AAK9H,OAAO,EAAE,WAAW,EAAE,UAAU,EAAE,QAAQ,EAAwB,MAAM,SAAS,CAAC;AAClF,OAAO,EACL,gCAAgC,EAOjC,MAAM,sCAAsC,CAAC;AAE9C,eAAO,MAAM,eAAe,EAAE,UAG7B,CAAC;AAEF;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B,IAAI,EAAE,MAAM,GAAG,WAAW,GAAG,QAAQ,GAAG,WAAW,CAAC;IACpD,OAAO,EAAE,MAAM,CAAC;CACjB;AAkBD;;;;GAIG;AACH,wBAAgB,mBAAmB,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAI1D;AAED;;;;GAIG;AACH,wBAAgB,gBAAgB,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAGvD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;GAyBG;AACH,eAAO,MAAM,oBAAoB,GAAU,CAAC,GAAG,GAAG,EAChD,OAAO,MAAM,GAAG,gCAAgC,CAAC,OAAO,CAAC,EACzD,UAAS,IAAI,CAAC,gCAAgC,EAAE,OAAO,GAAG,OAAO,CAAC,GAAG;IACnE,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE,MAAM,CAAC;CACX,KACL,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,CA0HxB,CAAC;AAGF;;;;;;;;;;;;;;;GAeG;AACH,wBAAsB,WAAW,CAAC,CAAC,GAAG,GAAG,EACvC,KAAK,EAAE,MAAM,EACb,OAAO,GAAE;IACP,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE,QAAQ,CAAC;IACjB,cAAc,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;IACjC,KAAK,CAAC,EAAE,IAAI,EAAE,CAAC;IACf,kBAAkB,CAAC,EAAE,OAAO,CAAC;IAC7B,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,KAAK,GAAG,MAAM,GAAG,MAAM,CAAC;IACtC,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC;CACvB,GACL,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC,CAsHzB"}
package/dist/types-frontend/test.d.ts
CHANGED
@@ -1,2 +1,2 @@
-
+import 'dotenv/config';
 //# sourceMappingURL=test.d.ts.map
package/dist/types-frontend/test.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"test.d.ts","sourceRoot":"","sources":["../../src/test.ts"],"names":[],"mappings":""}
+{"version":3,"file":"test.d.ts","sourceRoot":"","sources":["../../src/test.ts"],"names":[],"mappings":"AAEA,OAAO,eAAe,CAAC"}