mdi-llmkit 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +108 -0
- package/dist/gpt_api/functions.d.ts +25 -0
- package/dist/gpt_api/functions.js +193 -0
- package/dist/gpt_api/gpt_conversation.d.ts +43 -0
- package/dist/gpt_api/gpt_conversation.js +146 -0
- package/dist/gpt_api/json_schema_format.d.ts +18 -0
- package/dist/gpt_api/json_schema_format.js +195 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +3 -0
- package/package.json +40 -0
package/README.md
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
# mdi-llmkit (TypeScript)
|
|
2
|
+
|
|
3
|
+
Utilities for managing LLM chat conversations and structured JSON responses with OpenAI's Responses API.
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm install mdi-llmkit openai
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Quick Start
|
|
12
|
+
|
|
13
|
+
### `gptSubmit`
|
|
14
|
+
|
|
15
|
+
```ts
|
|
16
|
+
import OpenAI from "openai";
|
|
17
|
+
import { gptSubmit } from "mdi-llmkit";
|
|
18
|
+
|
|
19
|
+
const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
|
|
20
|
+
|
|
21
|
+
const reply = await gptSubmit(
|
|
22
|
+
[{ role: "user", content: "Say hello." }],
|
|
23
|
+
client,
|
|
24
|
+
);
|
|
25
|
+
|
|
26
|
+
console.log(reply);
|
|
27
|
+
```
|
|
28
|
+
|
|
29
|
+
### `GptConversation`
|
|
30
|
+
|
|
31
|
+
```ts
|
|
32
|
+
import OpenAI from "openai";
|
|
33
|
+
import { GptConversation } from "mdi-llmkit";
|
|
34
|
+
|
|
35
|
+
const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
|
|
36
|
+
const conversation = new GptConversation([], { openaiClient: client });
|
|
37
|
+
|
|
38
|
+
const reply = await conversation.submitUserMessage("Give me three project name ideas.");
|
|
39
|
+
console.log(reply);
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
### `JSONSchemaFormat`
|
|
43
|
+
|
|
44
|
+
```ts
|
|
45
|
+
import OpenAI from "openai";
import { JSONSchemaFormat, JSON_INTEGER, gptSubmit } from "mdi-llmkit";

const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
|
|
46
|
+
|
|
47
|
+
const responseFormat = JSONSchemaFormat(
|
|
48
|
+
{
|
|
49
|
+
answer: "The final answer",
|
|
50
|
+
confidence: ["Confidence score", [0, 100], []],
|
|
51
|
+
rank: JSON_INTEGER,
|
|
52
|
+
},
|
|
53
|
+
{
|
|
54
|
+
name: "answer_payload",
|
|
55
|
+
description: "Structured answer payload",
|
|
56
|
+
},
|
|
57
|
+
);
|
|
58
|
+
|
|
59
|
+
const result = await gptSubmit(
|
|
60
|
+
[{ role: "user", content: "Return answer as structured JSON." }],
|
|
61
|
+
client,
|
|
62
|
+
{ jsonResponse: responseFormat },
|
|
63
|
+
);
|
|
64
|
+
```
|
|
65
|
+
|
|
66
|
+
## JSON Response Mode
|
|
67
|
+
|
|
68
|
+
```ts
|
|
69
|
+
import OpenAI from "openai";
|
|
70
|
+
import { gptSubmit } from "mdi-llmkit";
|
|
71
|
+
|
|
72
|
+
const client = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
|
|
73
|
+
|
|
74
|
+
const result = await gptSubmit(
|
|
75
|
+
[{ role: "user", content: "Return JSON with keys a and b." }],
|
|
76
|
+
client,
|
|
77
|
+
{ jsonResponse: true },
|
|
78
|
+
);
|
|
79
|
+
|
|
80
|
+
console.log(result);
|
|
81
|
+
```
|
|
82
|
+
|
|
83
|
+
## Notes
|
|
84
|
+
|
|
85
|
+
- Current TypeScript parity slices include `gptSubmit`, `GptConversation`, and `JSONSchemaFormat`.
|
|
86
|
+
- Integer schemas can be expressed with `JSON_INTEGER`; numeric (float-capable) schemas can use `JSON_NUMBER`.
|
|
87
|
+
|
|
88
|
+
## Migration from Python
|
|
89
|
+
|
|
90
|
+
- Function naming: Python `gpt_submit(...)` maps to TypeScript `gptSubmit(...)`.
|
|
91
|
+
- Argument style: Python keyword args map to a TypeScript options object.
|
|
92
|
+
- Conversation submit methods: Python `submit_user_message(...)` maps to `submitUserMessage(...)`.
|
|
93
|
+
- JSON schema DSL: Python tuple metadata uses TypeScript array metadata.
|
|
94
|
+
- Python: `("Age", (0, 120), int)`
|
|
95
|
+
- TypeScript: `["Age", [0, 120], JSON_INTEGER]`
|
|
96
|
+
- JSON schema type markers in TypeScript:
|
|
97
|
+
- `JSON_INTEGER` for integer-only values.
|
|
98
|
+
- `JSON_NUMBER` for float-capable numeric values.
|
|
99
|
+
|
|
100
|
+
## CI and Release
|
|
101
|
+
|
|
102
|
+
- CI workflow: `.github/workflows/typescript-ci.yml`
|
|
103
|
+
- Runs on push to `main` and on pull requests when TypeScript package files change.
|
|
104
|
+
- Executes `npm ci`, `npm test`, and `npm run build` in `packages/typescript-mdi-llmkit`.
|
|
105
|
+
- Release workflow: `.github/workflows/typescript-release.yml`
|
|
106
|
+
- Runs on tags matching `typescript-v*` (for example: `typescript-v0.1.0`).
|
|
107
|
+
- Requires repository secret `NPM_TOKEN` with publish permission to npm.
|
|
108
|
+
- Executes tests/build before `npm publish --access public --provenance`.
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/** Default low-cost model identifier for cheap/bulk requests. */
export declare const GPT_MODEL_CHEAP = "gpt-4.1-nano";
/** Default high-capability model identifier; used by gptSubmit when no model is given. */
export declare const GPT_MODEL_SMART = "gpt-4.1";
/**
 * Minimal structural interface for an OpenAI client: only the Responses API
 * surface (`responses.create`) that this package actually calls.
 */
export interface OpenAIClientLike {
    responses: {
        create: (args: {
            model: string;
            input: unknown[];
            text?: Record<string, unknown>;
        }) => Promise<any> | any;
    };
}
/** A chat message with the "system" role. */
export interface SystemMessage {
    role: "system";
    content: string;
}
/** Options accepted by {@link gptSubmit}. */
export interface GptSubmitOptions {
    /** Model identifier; defaults to GPT_MODEL_SMART. */
    model?: string;
    /**
     * JSON response mode: `true` selects plain json_object mode; a record or
     * JSON string is passed through as the Responses API `text` parameter.
     */
    jsonResponse?: boolean | Record<string, unknown> | string;
    /** Optional system message prepended in front of the conversation. */
    systemAnnouncementMessage?: string;
    /** Maximum number of attempts on retryable failures (default 5). */
    retryLimit?: number;
    /** Seconds to wait between retries after an API error (default 30). */
    retryBackoffTimeSeconds?: number;
    /** Invoked with human-readable warning text on recoverable errors. */
    warningCallback?: (message: string) => void;
}
/** Builds a "!DATETIME:"-tagged system message carrying the current local time. */
export declare function currentDatetimeSystemMessage(): SystemMessage;
/**
 * Submits messages to the Responses API. Returns the reply text, or — when
 * jsonResponse is set — the first JSON value parsed out of the reply.
 */
export declare function gptSubmit(messages: unknown[], openaiClient: OpenAIClientLike, options?: GptSubmitOptions): Promise<string | Record<string, unknown> | unknown[] | number | boolean | null>;
|
|
@@ -0,0 +1,193 @@
|
|
|
1
|
+
// Default low-cost model for cheap/bulk requests.
export const GPT_MODEL_CHEAP = "gpt-4.1-nano";
// Default high-capability model; gptSubmit falls back to this when no
// model option is provided.
export const GPT_MODEL_SMART = "gpt-4.1";
// Default number of attempts before gptSubmit gives up on retryable errors.
const GPT_RETRY_LIMIT_DEFAULT = 5;
// Default pause between attempts after an OpenAI API error.
const GPT_RETRY_BACKOFF_TIME_SECONDS_DEFAULT = 30;
|
|
5
|
+
// True only for plain "record" objects: non-null, non-array object values.
function isRecord(value) {
    if (value === null || Array.isArray(value)) {
        return false;
    }
    return typeof value === "object";
}
|
|
8
|
+
// Returns a promise that resolves (with undefined) after `ms` milliseconds.
function sleep(ms) {
    return new Promise(function (resolve) {
        setTimeout(resolve, ms);
    });
}
|
|
11
|
+
// Parses the FIRST complete JSON value at the start of `input`, ignoring any
// trailing text after it (LLM replies often append prose after a JSON
// payload). Throws SyntaxError when no complete value can be found.
function parseFirstJsonValue(input) {
    const text = input.trimStart();
    if (!text) {
        throw new SyntaxError("Unexpected end of JSON input");
    }
    const first = text[0];
    if (first === "{" || first === "[") {
        // Object/array: scan for the matching close bracket, tracking string
        // and escape state so brackets inside string literals are ignored.
        const closing = first === "{" ? "}" : "]";
        let depth = 0;
        let inString = false;
        let escaped = false;
        for (let index = 0; index < text.length; index += 1) {
            const char = text[index];
            if (inString) {
                if (escaped) {
                    escaped = false;
                    continue;
                }
                if (char === "\\") {
                    escaped = true;
                    continue;
                }
                if (char === '"') {
                    inString = false;
                }
                continue;
            }
            if (char === '"') {
                inString = true;
                continue;
            }
            // Depth only counts the OUTER bracket kind; mixed nesting still
            // balances because same-kind inner brackets pair with each other,
            // and JSON.parse validates the sliced candidate anyway.
            if (char === first) {
                depth += 1;
            }
            else if (char === closing) {
                depth -= 1;
                if (depth === 0) {
                    return JSON.parse(text.slice(0, index + 1));
                }
            }
        }
        // Ran off the end without closing the outermost bracket.
        throw new SyntaxError("Unexpected end of JSON input");
    }
    if (first === '"') {
        // Bare string value: find the closing unescaped quote.
        let escaped = false;
        for (let index = 1; index < text.length; index += 1) {
            const char = text[index];
            if (escaped) {
                escaped = false;
                continue;
            }
            if (char === "\\") {
                escaped = true;
                continue;
            }
            if (char === '"') {
                return JSON.parse(text.slice(0, index + 1));
            }
        }
        throw new SyntaxError("Unexpected end of JSON input");
    }
    // Literals: prefix match is intentional — "first value" semantics mean
    // trailing text (e.g. "true, and also...") is ignored.
    if (text.startsWith("true")) {
        return true;
    }
    if (text.startsWith("false")) {
        return false;
    }
    if (text.startsWith("null")) {
        return null;
    }
    // Leading JSON number (integer, fraction, exponent).
    const numberMatch = text.match(/^-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?/);
    if (numberMatch) {
        return JSON.parse(numberMatch[0]);
    }
    throw new SyntaxError("Unexpected token in JSON input");
}
|
|
87
|
+
// Heuristic retry classifier: treats Error instances whose name mentions
// "OpenAI" or "APIError" as transient API failures worth retrying.
function isRetryableOpenAIError(error) {
    if (error instanceof Error) {
        const errorName = error.name || "";
        return errorName.includes("OpenAI") || errorName.includes("APIError");
    }
    return false;
}
|
|
94
|
+
// Builds a system message tagged "!DATETIME:" carrying the current LOCAL
// time as "YYYY-MM-DD HH:MM:SS". gptSubmit removes stale "!DATETIME:"
// system messages and prepends a fresh one on every call.
export function currentDatetimeSystemMessage() {
    const two = (value) => String(value).padStart(2, "0");
    const now = new Date();
    const datePart = [now.getFullYear(), two(now.getMonth() + 1), two(now.getDate())].join("-");
    const timePart = [two(now.getHours()), two(now.getMinutes()), two(now.getSeconds())].join(":");
    return {
        role: "system",
        content: `!DATETIME: The current date and time is ${datePart} ${timePart}`,
    };
}
|
|
104
|
+
/**
 * Submits a conversation to the OpenAI Responses API and returns the reply.
 *
 * A fresh "!DATETIME:" system message is always prepended (stale ones from
 * earlier calls are filtered out first), and an optional announcement system
 * message can be prepended in front of that. When options.jsonResponse is
 * set, the reply text is parsed and the FIRST JSON value found is returned;
 * otherwise the trimmed reply string is returned.
 *
 * Retries up to options.retryLimit times: immediately after a JSON parse
 * failure, and after options.retryBackoffTimeSeconds on an OpenAI API error.
 * Any other error propagates to the caller unchanged.
 */
export async function gptSubmit(messages, openaiClient, options = {}) {
    const model = options.model || GPT_MODEL_SMART;
    const retryLimit = options.retryLimit ?? GPT_RETRY_LIMIT_DEFAULT;
    const retryBackoffTimeSeconds = options.retryBackoffTimeSeconds ?? GPT_RETRY_BACKOFF_TIME_SECONDS_DEFAULT;
    let failedError = null;
    // Translate the jsonResponse option into the Responses API `text` param.
    let openaiTextParam;
    if (options.jsonResponse) {
        if (typeof options.jsonResponse === "boolean") {
            openaiTextParam = { format: { type: "json_object" } };
        }
        else if (typeof options.jsonResponse === "string") {
            openaiTextParam = JSON.parse(options.jsonResponse);
        }
        else if (isRecord(options.jsonResponse)) {
            // Deep-copy so the caller's format object is never mutated below.
            openaiTextParam = JSON.parse(JSON.stringify(options.jsonResponse));
            const format = openaiTextParam.format;
            if (isRecord(format) && typeof format.description === "string") {
                // FIX: trim the WHOLE combined description. Previously `.trim()`
                // bound only to the final template literal (member access binds
                // tighter than `+`), making it a no-op.
                format.description = (
                    `${format.description}\n\nABSOLUTELY NO UNICODE ALLOWED. ` +
                    `Only use typeable keyboard characters. Do not try to circumvent this rule ` +
                    `with escape sequences, backslashes, or other tricks. Use double dashes (--), ` +
                    `straight quotes (") and single quotes (') instead of em-dashes, en-dashes, ` +
                    `and curly versions.`
                ).trim();
            }
        }
    }
    // Drop stale "!DATETIME:" system messages; a fresh one is prepended below.
    const filteredMessages = messages.filter((message) => {
        if (!isRecord(message)) {
            return true;
        }
        const role = message.role;
        const content = message.content;
        return !(role === "system" &&
            typeof content === "string" &&
            content.startsWith("!DATETIME:"));
    });
    let preparedMessages = [currentDatetimeSystemMessage(), ...filteredMessages];
    if (options.systemAnnouncementMessage && options.systemAnnouncementMessage.trim()) {
        preparedMessages = [
            { role: "system", content: options.systemAnnouncementMessage.trim() },
            ...preparedMessages,
        ];
    }
    for (let index = 0; index < retryLimit; index += 1) {
        let llmReply = "";
        try {
            const payload = {
                model,
                input: preparedMessages,
            };
            if (openaiTextParam) {
                payload.text = openaiTextParam;
            }
            const llmResponse = await openaiClient.responses.create(payload);
            if (llmResponse.error && options.warningCallback) {
                // FIX: stringify the error payload; plain interpolation rendered
                // object errors as "[object Object]".
                options.warningCallback(`ERROR: OpenAI API returned an error: ${JSON.stringify(llmResponse.error)}`);
            }
            if (llmResponse.incomplete_details && options.warningCallback) {
                options.warningCallback(`ERROR: OpenAI API returned incomplete details: ${JSON.stringify(llmResponse.incomplete_details)}`);
            }
            llmReply = llmResponse.output_text.trim();
            if (!options.jsonResponse) {
                return `${llmReply}`;
            }
            return parseFirstJsonValue(llmReply);
        }
        catch (error) {
            if (error instanceof SyntaxError) {
                // Malformed JSON from the model: retry immediately (no backoff).
                failedError = error;
                if (options.warningCallback) {
                    options.warningCallback(`JSON decode error:\n\n${error}.\n\nRaw text of LLM Reply:\n${llmReply}\n\nRetrying (attempt ${index + 1} of ${retryLimit}) immediately...`);
                }
                continue;
            }
            if (isRetryableOpenAIError(error)) {
                // Transient API failure: back off, then retry.
                failedError = error;
                if (options.warningCallback) {
                    options.warningCallback(`OpenAI API error:\n\n${error}.\n\nRetrying (attempt ${index + 1} of ${retryLimit}) in ${retryBackoffTimeSeconds} seconds...`);
                }
                await sleep(retryBackoffTimeSeconds * 1000);
                continue;
            }
            // Non-retryable: surface to the caller.
            throw error;
        }
    }
    // All attempts exhausted: rethrow the last retryable failure.
    if (failedError) {
        throw failedError;
    }
    throw new Error("Unknown error occurred in gptSubmit");
}
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
import { type OpenAIClientLike } from "./functions.js";
/** A single chat message: a role plus string content. */
export interface ConversationMessage {
    role: string;
    content: string;
}
/** Construction options for {@link GptConversation}. */
export interface GptConversationOptions {
    /** Client used by submit(); may also be set later via the accessor. */
    openaiClient?: OpenAIClientLike;
    /** Default model for submit() calls. */
    model?: string;
}
/** Per-call options for {@link GptConversation.submit}. */
export interface SubmitOptions {
    model?: string;
    jsonResponse?: boolean | Record<string, unknown> | string;
}
/**
 * A chat transcript that is itself an Array of messages, with helpers to
 * append messages and submit the conversation via gptSubmit. Derived arrays
 * (map/filter/slice) are plain Arrays thanks to Symbol.species.
 */
export declare class GptConversation extends Array<ConversationMessage> {
    #private;
    static get [Symbol.species](): ArrayConstructor;
    get openaiClient(): OpenAIClientLike | undefined;
    set openaiClient(value: OpenAIClientLike | undefined);
    get model(): string | undefined;
    set model(value: string | undefined);
    /** Result of the most recent submit(): reply string, or parsed JSON. */
    get lastReply(): unknown;
    set lastReply(value: unknown);
    constructor(messages?: ConversationMessage[], options?: GptConversationOptions);
    /** Replaces the transcript contents in place. */
    assignMessages(messages?: ConversationMessage[]): this;
    /** Deep-copies the messages; shares the client reference and model. */
    clone(): GptConversation;
    /** Optionally appends a message, then submits the whole transcript. */
    submit(message?: string | Record<string, unknown>, role?: string | null, options?: SubmitOptions): Promise<unknown>;
    /** Appends a message; object content is pretty-printed as JSON. */
    addMessage(role: string, content: unknown): this;
    addUserMessage(content: unknown): this;
    addAssistantMessage(content: unknown): this;
    addSystemMessage(content: unknown): this;
    addDeveloperMessage(content: unknown): this;
    /** addMessage followed by submit(). */
    submitMessage(role: string, content: unknown): Promise<unknown>;
    submitUserMessage(content: unknown): Promise<unknown>;
    submitAssistantMessage(content: unknown): Promise<unknown>;
    submitSystemMessage(content: unknown): Promise<unknown>;
    submitDeveloperMessage(content: unknown): Promise<unknown>;
    /** Last message in the transcript, or null when empty. */
    getLastMessage(): ConversationMessage | null;
    getMessagesByRole(role: string): ConversationMessage[];
    /** lastReply when it is a string, else "". */
    getLastReplyStr(): string;
    /** Deep copy of lastReply when it is an object, else {}. */
    getLastReplyDict(): Record<string, unknown>;
    /** Field read from the last JSON reply, with a fallback default. */
    getLastReplyDictField(fieldName: string, defaultValue?: unknown): unknown;
    /** Plain-Array copy of the transcript. */
    toDictList(): ConversationMessage[];
}
|
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
import { GPT_MODEL_SMART, gptSubmit } from "./functions.js";
|
|
2
|
+
// Plain-object check: non-null, non-array object values.
function isRecord(value) {
    return typeof value === "object" && value !== null && !Array.isArray(value);
}
/**
 * A chat transcript modeled as an Array of { role, content } messages, with
 * helpers to append messages and submit the whole conversation to the
 * Responses API via gptSubmit.
 */
export class GptConversation extends Array {
    // Derived arrays (map/filter/slice) come back as plain Arrays.
    static get [Symbol.species]() {
        return Array;
    }
    #openaiClient;
    #model;
    // Result of the most recent submit(): reply string, or parsed JSON.
    #lastReply = null;
    get openaiClient() {
        return this.#openaiClient;
    }
    set openaiClient(value) {
        this.#openaiClient = value;
    }
    get model() {
        return this.#model;
    }
    set model(value) {
        this.#model = value;
    }
    get lastReply() {
        return this.#lastReply;
    }
    set lastReply(value) {
        this.#lastReply = value;
    }
    constructor(messages = [], options = {}) {
        super(...messages);
        this.#openaiClient = options.openaiClient;
        this.#model = options.model;
    }
    /** Replaces the transcript contents in place; returns this. */
    assignMessages(messages) {
        this.length = 0;
        if (messages?.length) {
            this.push(...messages);
        }
        return this;
    }
    /** Deep-copies the messages (JSON round-trip); shares client and model. */
    clone() {
        return new GptConversation(JSON.parse(JSON.stringify([...this])), {
            openaiClient: this.openaiClient,
            model: this.model,
        });
    }
    /**
     * Optionally appends `message`, submits the transcript via gptSubmit,
     * records the reply as an assistant message and in lastReply, and
     * returns the reply. A record message with a "format" key doubles as
     * the jsonResponse spec; its own role/content fields are honored too.
     */
    async submit(message, role = "user", options = {}) {
        if (!this.openaiClient) {
            throw new Error("OpenAI client is not set. Please provide an OpenAI client.");
        }
        const model = options.model || this.model || GPT_MODEL_SMART;
        let jsonResponse = options.jsonResponse;
        if (message) {
            if (isRecord(message)) {
                if (!jsonResponse && "format" in message) {
                    jsonResponse = message;
                }
                // `role` defaults to "user"; this branch only applies when the
                // caller explicitly passed a falsy role (e.g. null).
                if (!role && typeof message.role === "string") {
                    role = message.role;
                }
                if ("content" in message) {
                    message = String(message.content ?? "");
                }
            }
            this.addMessage(role || "user", message);
        }
        const llmReply = await gptSubmit(this.toDictList(), this.openaiClient, {
            jsonResponse,
            model,
        });
        this.addAssistantMessage(llmReply);
        this.lastReply = llmReply;
        return llmReply;
    }
    /** Appends a message; object content is pretty-printed as JSON. */
    addMessage(role, content) {
        let normalizedContent;
        if (typeof content === "string") {
            normalizedContent = content;
        }
        else if (isRecord(content)) {
            normalizedContent = JSON.stringify(content, null, 2);
        }
        else {
            normalizedContent = String(content);
        }
        this.push({ role, content: normalizedContent });
        return this;
    }
    addUserMessage(content) {
        return this.addMessage("user", content);
    }
    addAssistantMessage(content) {
        return this.addMessage("assistant", content);
    }
    addSystemMessage(content) {
        return this.addMessage("system", content);
    }
    addDeveloperMessage(content) {
        return this.addMessage("developer", content);
    }
    /** Appends a message with the given role, then submits. */
    async submitMessage(role, content) {
        this.addMessage(role, content);
        return this.submit();
    }
    async submitUserMessage(content) {
        this.addUserMessage(content);
        return this.submit();
    }
    async submitAssistantMessage(content) {
        this.addAssistantMessage(content);
        return this.submit();
    }
    async submitSystemMessage(content) {
        this.addSystemMessage(content);
        return this.submit();
    }
    async submitDeveloperMessage(content) {
        this.addDeveloperMessage(content);
        return this.submit();
    }
    /** Last message in the transcript, or null when empty. */
    getLastMessage() {
        return this.length ? this[this.length - 1] : null;
    }
    /** All messages matching the given role (plain Array, per species). */
    getMessagesByRole(role) {
        return this.filter((message) => message.role === role);
    }
    /** lastReply when it is a string, else "". */
    getLastReplyStr() {
        return typeof this.lastReply === "string" ? this.lastReply : "";
    }
    /** Deep copy of lastReply when it is an object, else {}. */
    getLastReplyDict() {
        if (!isRecord(this.lastReply)) {
            return {};
        }
        return JSON.parse(JSON.stringify(this.lastReply));
    }
    /**
     * Reads a field from the last JSON reply, falling back to defaultValue
     * when the field is missing or nullish.
     * FIX: previously returned null (ignoring defaultValue) whenever
     * lastReply was not an object; it now honors defaultValue there too.
     */
    getLastReplyDictField(fieldName, defaultValue = null) {
        if (!isRecord(this.lastReply)) {
            return defaultValue;
        }
        return this.lastReply[fieldName] ?? defaultValue;
    }
    /** Plain-Array copy of the transcript. */
    toDictList() {
        return [...this];
    }
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
/** Schema-DSL marker for integer-only numeric fields. */
export declare const JSON_INTEGER: unique symbol;
/** Schema-DSL marker for float-capable numeric fields. */
export declare const JSON_NUMBER: unique symbol;
/** Schema-DSL marker for string fields (alias of the String constructor). */
export declare const JSON_STRING: StringConstructor;
/** Schema-DSL marker for boolean fields (alias of the Boolean constructor). */
export declare const JSON_BOOLEAN: BooleanConstructor;
/** Optional metadata attached to the generated format payload. */
export interface JSONSchemaFormatOptions {
    name?: string;
    description?: string;
}
/** Shape of the Responses API `text` parameter built by JSONSchemaFormat. */
export interface JSONSchemaFormatResult {
    format: {
        type: "json_schema";
        strict: true;
        name?: string;
        description?: string;
        schema: Record<string, unknown>;
    };
}
/** Converts the package's schema DSL into a strict json_schema format payload. */
export declare function JSONSchemaFormat(schema: unknown, options?: JSONSchemaFormatOptions): JSONSchemaFormatResult;
|
|
@@ -0,0 +1,195 @@
|
|
|
1
|
+
// Sentinel markers for the schema DSL: JSON_INTEGER for integer-only fields,
// JSON_NUMBER for float-capable numeric fields.
export const JSON_INTEGER = Symbol("JSON_INTEGER");
export const JSON_NUMBER = Symbol("JSON_NUMBER");
// String/boolean markers alias the built-in constructors, so callers may pass
// either the JSON_* name or String/Boolean directly.
export const JSON_STRING = String;
export const JSON_BOOLEAN = Boolean;
// Maps DSL markers (and raw constructors) to JSON Schema type names.
// JSON_STRING/String and JSON_BOOLEAN/Boolean are the same Map keys, so those
// later entries are redundant but harmless; BigInt maps to "integer".
const TYPEMAP = new Map([
    [JSON_STRING, "string"],
    [JSON_INTEGER, "integer"],
    [JSON_NUMBER, "number"],
    [JSON_BOOLEAN, "boolean"],
    [String, "string"],
    [Boolean, "boolean"],
    [BigInt, "integer"],
    [Number, "number"],
]);
|
|
15
|
+
// Plain-object test: excludes null and arrays.
function isRecord(value) {
    return Boolean(value) && typeof value === "object" && !Array.isArray(value);
}
|
|
18
|
+
// True when value is an array containing only strings (including []).
function isStringArray(value) {
    if (!Array.isArray(value)) {
        return false;
    }
    for (const item of value) {
        if (typeof item !== "string") {
            return false;
        }
    }
    return true;
}
|
|
21
|
+
// True for a [min, max] pair where each bound is a number or null and at
// least one of the two is an actual number.
function isNumericRangeArray(value) {
    if (!Array.isArray(value) || value.length !== 2) {
        return false;
    }
    const isBound = (bound) => bound === null || typeof bound === "number";
    const [lo, hi] = value;
    if (!isBound(lo) || !isBound(hi)) {
        return false;
    }
    return typeof lo === "number" || typeof hi === "number";
}
|
|
30
|
+
// Detects the tuple-style metadata form of the schema DSL: an array of at
// least two entries that is NOT a plain enum (all-string array) and that
// contains at least one description string or [min, max] numeric range.
function isTupleMetadataArray(value) {
    if (!Array.isArray(value) || value.length < 2 || isStringArray(value)) {
        return false;
    }
    for (const item of value) {
        if (typeof item === "string" || isNumericRangeArray(item)) {
            return true;
        }
    }
    return false;
}
|
|
39
|
+
// Maps a DSL schema value to a JSON Schema primitive type name, or null when
// the value is not a recognized marker/constructor/example primitive.
function inferPrimitiveType(schemaValue) {
    const mapped = TYPEMAP.get(schemaValue);
    if (mapped) {
        return mapped;
    }
    switch (typeof schemaValue) {
        case "string":
            return "string";
        case "boolean":
            return "boolean";
        case "bigint":
            return "integer";
        case "number":
            // Example numbers: whole values imply "integer", otherwise "number".
            return Number.isInteger(schemaValue) ? "integer" : "number";
        default:
            return null;
    }
}
|
|
58
|
+
// Recursively converts a schema-DSL value into a JSON Schema fragment.
//
// DSL forms handled here:
//   - plain object           -> closed "object" schema (all keys required);
//                               string-valued keys become described strings
//   - tuple-metadata array   -> mix of description string, enum (>=2 strings),
//                               [min, max] numeric range, and a value slot
//   - all-string array (>=2) -> string enum
//   - other arrays           -> "array" schema; first element is the item
//                               exemplar, numeric range becomes min/maxItems
//   - anything else          -> primitive via inferPrimitiveType (throws on
//                               unrecognized values)
function convertSchemaRecursive(subschema) {
    let subschemaDescription = "";
    let subschemaEnum = [];
    let subschemaNumrange = [null, null];
    let subschemaValue = subschema;
    if (isTupleMetadataArray(subschema)) {
        // Pull description / enum / numeric-range / value entries out of the
        // tuple; for each kind, the LAST matching entry wins.
        for (const item of subschema) {
            if (!item) {
                // Falsy entries (0, "", null, false) land in the value slot.
                subschemaValue = item;
                continue;
            }
            if (typeof item === "string") {
                subschemaDescription = item;
                continue;
            }
            if (isStringArray(item) && item.length >= 2) {
                subschemaEnum = item;
                continue;
            }
            if (isNumericRangeArray(item)) {
                subschemaNumrange = item;
                continue;
            }
            subschemaValue = item;
        }
    }
    // If the value slot is still an array marker (the tuple itself, or an
    // explicit [] placeholder), pick a concrete primitive from the collected
    // enum / numeric-range metadata.
    if ((Array.isArray(subschemaValue) && isTupleMetadataArray(subschemaValue)) ||
        (Array.isArray(subschemaValue) && subschemaValue.length === 0)) {
        if (subschemaEnum.length > 0) {
            subschemaValue = JSON_STRING;
        }
        const [nr0, nr1] = subschemaNumrange;
        if (nr0 !== null || nr1 !== null) {
            // A fractional bound forces "number"; integer bounds imply "integer".
            // Note: this overrides the enum-derived JSON_STRING above.
            if ((typeof nr0 === "number" && !Number.isInteger(nr0)) || (typeof nr1 === "number" && !Number.isInteger(nr1))) {
                subschemaValue = JSON_NUMBER;
            }
            else {
                subschemaValue = JSON_INTEGER;
            }
        }
    }
    const result = {};
    if (isRecord(subschemaValue)) {
        // Object schema: closed (additionalProperties: false) with every key
        // required, as OpenAI strict structured outputs expect.
        result.type = "object";
        if (subschemaDescription) {
            result.description = subschemaDescription;
        }
        result.additionalProperties = false;
        const keys = Object.keys(subschemaValue);
        result.required = keys;
        const properties = {};
        for (const [key, value] of Object.entries(subschemaValue)) {
            if (typeof value === "string") {
                // Shorthand: a string value is the property's description.
                properties[key] = { type: "string", description: value };
            }
            else {
                properties[key] = convertSchemaRecursive(value);
            }
        }
        result.properties = properties;
    }
    else if (Array.isArray(subschemaValue)) {
        if (subschemaValue.length >= 2 && isStringArray(subschemaValue)) {
            // Enum shorthand: an all-string array of 2+ entries.
            result.type = "string";
            subschemaEnum = subschemaValue;
        }
        else {
            // Array schema: numeric range repurposed as item-count bounds.
            result.type = "array";
            if (subschemaDescription) {
                result.description = subschemaDescription;
            }
            if (subschemaNumrange[0] !== null) {
                result.minItems = subschemaNumrange[0];
            }
            if (subschemaNumrange[1] !== null) {
                result.maxItems = subschemaNumrange[1];
            }
            const arrayExemplar = subschemaValue[0];
            if (typeof arrayExemplar === "string") {
                result.items = { type: "string", description: arrayExemplar };
            }
            else {
                result.items = convertSchemaRecursive(arrayExemplar);
            }
        }
    }
    else {
        const primitiveType = inferPrimitiveType(subschemaValue);
        if (!primitiveType) {
            throw new Error(`Unrecognized type for schema value: ${String(subschemaValue)}`);
        }
        result.type = primitiveType;
        if (subschemaDescription) {
            result.description = subschemaDescription;
        }
    }
    // Attach collected enum and (for numeric types) min/max bounds.
    if (subschemaEnum.length) {
        result.enum = subschemaEnum;
    }
    if (result.type === "integer" || result.type === "number") {
        if (subschemaNumrange[0] !== null) {
            result.minimum = subschemaNumrange[0];
        }
        if (subschemaNumrange[1] !== null) {
            result.maximum = subschemaNumrange[1];
        }
    }
    return result;
}
|
|
167
|
+
// Builds a Responses API `text` parameter (strict json_schema format) from
// the package's schema DSL. A non-object top-level schema is wrapped in a
// single-property object keyed by options.name (or "schema").
export function JSONSchemaFormat(schema, options = {}) {
    let converted = convertSchemaRecursive(schema);
    if (converted.type !== "object") {
        const wrapperName = options.name || "schema";
        converted = {
            type: "object",
            required: [wrapperName],
            additionalProperties: false,
            properties: { [wrapperName]: converted },
        };
    }
    return {
        format: {
            type: "json_schema",
            strict: true,
            schema: converted,
            ...(options.name ? { name: options.name } : {}),
            ...(options.description ? { description: options.description } : {}),
        },
    };
}
|
package/dist/index.d.ts
ADDED
package/dist/index.js
ADDED
package/package.json
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "mdi-llmkit",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Utilities for managing multi-shot conversations and structured data handling in LLM applications",
|
|
5
|
+
"author": "Mikhail Voloshin",
|
|
6
|
+
"license": "MIT",
|
|
7
|
+
"type": "module",
|
|
8
|
+
"sideEffects": false,
|
|
9
|
+
"main": "dist/index.js",
|
|
10
|
+
"types": "dist/index.d.ts",
|
|
11
|
+
"exports": {
|
|
12
|
+
".": {
|
|
13
|
+
"types": "./dist/index.d.ts",
|
|
14
|
+
"import": "./dist/index.js"
|
|
15
|
+
}
|
|
16
|
+
},
|
|
17
|
+
"files": [
|
|
18
|
+
"dist"
|
|
19
|
+
],
|
|
20
|
+
"scripts": {
|
|
21
|
+
"build": "tsc -p tsconfig.json",
|
|
22
|
+
"test": "vitest run",
|
|
23
|
+
"prepublishOnly": "npm run test && npm run build"
|
|
24
|
+
},
|
|
25
|
+
"keywords": [
|
|
26
|
+
"llm",
|
|
27
|
+
"openai",
|
|
28
|
+
"responses-api",
|
|
29
|
+
"structured-outputs",
|
|
30
|
+
"chat"
|
|
31
|
+
],
|
|
32
|
+
"dependencies": {
|
|
33
|
+
"openai": "^6.2.0"
|
|
34
|
+
},
|
|
35
|
+
"devDependencies": {
|
|
36
|
+
"@types/node": "^22.18.10",
|
|
37
|
+
"typescript": "^5.9.2",
|
|
38
|
+
"vitest": "^3.2.4"
|
|
39
|
+
}
|
|
40
|
+
}
|