@gammatech/aijsx 0.5.0-dev.2024-03-14.2 → 0.5.0-dev.2024-03-19
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +105 -8
- package/dist/index.mjs +105 -8
- package/package.json +1 -1
package/dist/index.js
CHANGED
|
@@ -1127,6 +1127,25 @@ function escape(html) {
|
|
|
1127
1127
|
}
|
|
1128
1128
|
return html.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;").replace(/"/g, "&quot;");
|
|
1129
1129
|
}
|
|
1130
|
+
// Merge every run of adjacent "#text" nodes in `nodes` into a single text
// node, keeping non-text nodes in their original positions and order.
// Returns a new array; the input array and its nodes are not modified.
function collapseTextNodes(nodes) {
  const collapsed = [];
  let pendingText = "";
  for (const node of nodes) {
    if (node.nodeName === "#text") {
      // Accumulate consecutive text content; it is emitted as one node when
      // the run ends (at the next non-text node, or at the end of the list).
      pendingText += node.value;
      continue;
    }
    if (pendingText !== "") {
      collapsed.push(new XmlNode(node.parent, "#text", {}, pendingText));
      pendingText = "";
    }
    collapsed.push(node);
  }
  // A trailing run of text nodes: attach it to the list's shared parent.
  if (pendingText !== "") {
    collapsed.push(new XmlNode(nodes[0].parent, "#text", {}, pendingText));
  }
  return collapsed;
}
|
|
1130
1149
|
|
|
1131
1150
|
// src/jsx-runtime.ts
|
|
1132
1151
|
function jsx(type, config, maybeKey) {
|
|
@@ -2033,13 +2052,16 @@ function OpenAIVisionChatCompletion(props, { logger, render, tracer, getContext
|
|
|
2033
2052
|
messages: openAIMessages,
|
|
2034
2053
|
stream: true
|
|
2035
2054
|
};
|
|
2055
|
+
const chatCompletionRequestToLog = cleanChatCompletionRequest(
|
|
2056
|
+
chatCompletionRequest
|
|
2057
|
+
);
|
|
2036
2058
|
const logRequestData = {
|
|
2037
2059
|
startTime,
|
|
2038
2060
|
model,
|
|
2039
2061
|
provider,
|
|
2040
2062
|
providerRegion,
|
|
2041
2063
|
inputMessages: renderedMessages,
|
|
2042
|
-
request:
|
|
2064
|
+
request: chatCompletionRequestToLog
|
|
2043
2065
|
};
|
|
2044
2066
|
logger.chatCompletionRequest("openai", logRequestData);
|
|
2045
2067
|
span.setAttributes({
|
|
@@ -2047,7 +2069,7 @@ function OpenAIVisionChatCompletion(props, { logger, render, tracer, getContext
|
|
|
2047
2069
|
provider,
|
|
2048
2070
|
providerRegion,
|
|
2049
2071
|
requestType: "openai",
|
|
2050
|
-
chatCompletionRequest
|
|
2072
|
+
chatCompletionRequest: chatCompletionRequestToLog
|
|
2051
2073
|
});
|
|
2052
2074
|
let chatResponse;
|
|
2053
2075
|
try {
|
|
@@ -2107,6 +2129,44 @@ function OpenAIVisionChatCompletion(props, { logger, render, tracer, getContext
|
|
|
2107
2129
|
}
|
|
2108
2130
|
);
|
|
2109
2131
|
}
|
|
2132
|
+
// Produce a copy of an OpenAI chat-completion request that is safe to log:
// inline base64 image payloads ("data:image..." URLs) in user-message content
// parts are truncated to their 22-character prefix (just the
// "data:image/...;base64," header) so logs and span attributes do not balloon
// with image bytes. The input request is never mutated. Non-user messages,
// plain-string content, text parts, and remote image URLs pass through
// unchanged.
function cleanChatCompletionRequest(chatCompletionRequest) {
  const { messages, ...rest } = chatCompletionRequest;
  return {
    ...rest,
    messages: messages.map((message) => {
      // Only user messages can carry multi-part (image) content.
      if (message.role !== "user" || typeof message.content === "string") {
        return message;
      }
      return {
        ...message,
        content: message.content.map((part) => {
          // Guard: only rewrite parts that are actually image_url parts.
          // The previous code assumed every non-"text" part had `image_url`,
          // which throws on unknown/future part types while merely logging.
          if (part.type !== "image_url" || part.image_url == null) {
            return part;
          }
          const { url, detail } = part.image_url;
          if (url.startsWith("data:image")) {
            return {
              type: "image_url",
              image_url: {
                // Keep only the data-URL header, e.g. "data:image/png;base64,".
                url: url.slice(0, 22) + "...",
                detail,
              },
            };
          }
          return {
            type: "image_url",
            image_url: { url, detail },
          };
        }),
      };
    }),
  };
}
|
|
2110
2170
|
|
|
2111
2171
|
// src/lib/openai/index.ts
|
|
2112
2172
|
var import_openai3 = require("openai");
|
|
@@ -2147,14 +2207,15 @@ function buildAnthropicMessages(childrenXml) {
|
|
|
2147
2207
|
}
|
|
2148
2208
|
for (const node of parsed.childNodes) {
|
|
2149
2209
|
if (node.nodeName === "UserMessage") {
|
|
2150
|
-
|
|
2210
|
+
const childNodes = collapseTextNodes(node.childNodes);
|
|
2211
|
+
if (childNodes.length === 1 && childNodes[0].nodeName === "#text") {
|
|
2151
2212
|
messages.push({
|
|
2152
|
-
content:
|
|
2213
|
+
content: childNodes[0].value,
|
|
2153
2214
|
role: "user"
|
|
2154
2215
|
});
|
|
2155
2216
|
continue;
|
|
2156
2217
|
}
|
|
2157
|
-
const parts =
|
|
2218
|
+
const parts = childNodes.map((n) => {
|
|
2158
2219
|
if (n.nodeName === "#text") {
|
|
2159
2220
|
return {
|
|
2160
2221
|
type: "text",
|
|
@@ -2174,7 +2235,7 @@ function buildAnthropicMessages(childrenXml) {
|
|
|
2174
2235
|
throw new Error(
|
|
2175
2236
|
"Invalid ChatCompletionContentPart, expecting text or ContentTypeImage"
|
|
2176
2237
|
);
|
|
2177
|
-
});
|
|
2238
|
+
}).filter((n) => n.type !== "text" || n.text.trim().length > 0);
|
|
2178
2239
|
messages.push({
|
|
2179
2240
|
content: parts,
|
|
2180
2241
|
role: "user"
|
|
@@ -2223,13 +2284,16 @@ function AnthropicChatCompletion(props, { render, logger, tracer, getContext })
|
|
|
2223
2284
|
temperature: props.temperature,
|
|
2224
2285
|
model: props.model
|
|
2225
2286
|
};
|
|
2287
|
+
const chatCompletionRequestToLog = cleanChatCompletionRequest2(
|
|
2288
|
+
anthropicCompletionRequest
|
|
2289
|
+
);
|
|
2226
2290
|
const logRequestData = {
|
|
2227
2291
|
startTime,
|
|
2228
2292
|
model: props.model,
|
|
2229
2293
|
provider,
|
|
2230
2294
|
providerRegion,
|
|
2231
2295
|
inputMessages,
|
|
2232
|
-
request:
|
|
2296
|
+
request: chatCompletionRequestToLog
|
|
2233
2297
|
};
|
|
2234
2298
|
logger.chatCompletionRequest("anthropic", logRequestData);
|
|
2235
2299
|
span.setAttributes({
|
|
@@ -2237,7 +2301,7 @@ function AnthropicChatCompletion(props, { render, logger, tracer, getContext })
|
|
|
2237
2301
|
provider,
|
|
2238
2302
|
providerRegion,
|
|
2239
2303
|
requestType: "anthropic",
|
|
2240
|
-
chatCompletionRequest:
|
|
2304
|
+
chatCompletionRequest: chatCompletionRequestToLog
|
|
2241
2305
|
});
|
|
2242
2306
|
let response;
|
|
2243
2307
|
try {
|
|
@@ -2353,6 +2417,39 @@ var renderChatMessageContent2 = (content) => {
|
|
|
2353
2417
|
throw new Error("Invalid ImageBlockParam type");
|
|
2354
2418
|
}).join("\n\n");
|
|
2355
2419
|
};
|
|
2420
|
+
// Produce a copy of an Anthropic messages request that is safe to log:
// base64 image payloads in user-message content blocks have their `data`
// truncated to a 22-character prefix so logs and span attributes do not
// include full image bytes. The input request is never mutated; non-user
// messages, plain-string content, text blocks, and blocks without a base64
// `source.data` string pass through unchanged.
function cleanChatCompletionRequest2(chatCompletionRequest) {
  const { messages, ...rest } = chatCompletionRequest;
  return {
    ...rest,
    messages: messages.map((message) => {
      // Only user messages can carry multi-part (image) content.
      if (message.role !== "user" || typeof message.content === "string") {
        return message;
      }
      return {
        ...message,
        content: message.content.map((part) => {
          if (typeof part === "string" || part.type === "text") {
            return part;
          }
          // Guards: `typeof null === "object"`, so the old bare typeof check
          // passed for `source: null` and then crashed on `.data`; likewise a
          // URL-type source has no `data` string to slice.
          if (
            "source" in part &&
            part.source !== null &&
            typeof part.source === "object" &&
            typeof part.source.data === "string"
          ) {
            return {
              ...part,
              source: {
                ...part.source,
                // Keep only a short prefix of the base64 payload.
                data: part.source.data.slice(0, 22) + "...",
              },
            };
          }
          return part;
        }),
      };
    }),
  };
}
|
|
2356
2453
|
|
|
2357
2454
|
// src/lib/anthropic/ClaudeImageBlock.tsx
|
|
2358
2455
|
var ClaudeImageBlockParam = (_props) => {
|
package/dist/index.mjs
CHANGED
|
@@ -1036,6 +1036,25 @@ function escape(html) {
|
|
|
1036
1036
|
}
|
|
1037
1037
|
return html.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;").replace(/"/g, "&quot;");
|
|
1038
1038
|
}
|
|
1039
|
+
// Merge every run of adjacent "#text" nodes in `nodes` into a single text
// node, keeping non-text nodes in their original positions and order.
// Returns a new array; the input array and its nodes are not modified.
function collapseTextNodes(nodes) {
  const collapsed = [];
  let pendingText = "";
  for (const node of nodes) {
    if (node.nodeName === "#text") {
      // Accumulate consecutive text content; it is emitted as one node when
      // the run ends (at the next non-text node, or at the end of the list).
      pendingText += node.value;
      continue;
    }
    if (pendingText !== "") {
      collapsed.push(new XmlNode(node.parent, "#text", {}, pendingText));
      pendingText = "";
    }
    collapsed.push(node);
  }
  // A trailing run of text nodes: attach it to the list's shared parent.
  if (pendingText !== "") {
    collapsed.push(new XmlNode(nodes[0].parent, "#text", {}, pendingText));
  }
  return collapsed;
}
|
|
1039
1058
|
|
|
1040
1059
|
// src/context.tsx
|
|
1041
1060
|
var StreamRenderContext = class _StreamRenderContext {
|
|
@@ -1935,13 +1954,16 @@ function OpenAIVisionChatCompletion(props, { logger, render, tracer, getContext
|
|
|
1935
1954
|
messages: openAIMessages,
|
|
1936
1955
|
stream: true
|
|
1937
1956
|
};
|
|
1957
|
+
const chatCompletionRequestToLog = cleanChatCompletionRequest(
|
|
1958
|
+
chatCompletionRequest
|
|
1959
|
+
);
|
|
1938
1960
|
const logRequestData = {
|
|
1939
1961
|
startTime,
|
|
1940
1962
|
model,
|
|
1941
1963
|
provider,
|
|
1942
1964
|
providerRegion,
|
|
1943
1965
|
inputMessages: renderedMessages,
|
|
1944
|
-
request:
|
|
1966
|
+
request: chatCompletionRequestToLog
|
|
1945
1967
|
};
|
|
1946
1968
|
logger.chatCompletionRequest("openai", logRequestData);
|
|
1947
1969
|
span.setAttributes({
|
|
@@ -1949,7 +1971,7 @@ function OpenAIVisionChatCompletion(props, { logger, render, tracer, getContext
|
|
|
1949
1971
|
provider,
|
|
1950
1972
|
providerRegion,
|
|
1951
1973
|
requestType: "openai",
|
|
1952
|
-
chatCompletionRequest
|
|
1974
|
+
chatCompletionRequest: chatCompletionRequestToLog
|
|
1953
1975
|
});
|
|
1954
1976
|
let chatResponse;
|
|
1955
1977
|
try {
|
|
@@ -2009,6 +2031,44 @@ function OpenAIVisionChatCompletion(props, { logger, render, tracer, getContext
|
|
|
2009
2031
|
}
|
|
2010
2032
|
);
|
|
2011
2033
|
}
|
|
2034
|
+
// Produce a copy of an OpenAI chat-completion request that is safe to log:
// inline base64 image payloads ("data:image..." URLs) in user-message content
// parts are truncated to their 22-character prefix (just the
// "data:image/...;base64," header) so logs and span attributes do not balloon
// with image bytes. The input request is never mutated. Non-user messages,
// plain-string content, text parts, and remote image URLs pass through
// unchanged.
function cleanChatCompletionRequest(chatCompletionRequest) {
  const { messages, ...rest } = chatCompletionRequest;
  return {
    ...rest,
    messages: messages.map((message) => {
      // Only user messages can carry multi-part (image) content.
      if (message.role !== "user" || typeof message.content === "string") {
        return message;
      }
      return {
        ...message,
        content: message.content.map((part) => {
          // Guard: only rewrite parts that are actually image_url parts.
          // The previous code assumed every non-"text" part had `image_url`,
          // which throws on unknown/future part types while merely logging.
          if (part.type !== "image_url" || part.image_url == null) {
            return part;
          }
          const { url, detail } = part.image_url;
          if (url.startsWith("data:image")) {
            return {
              type: "image_url",
              image_url: {
                // Keep only the data-URL header, e.g. "data:image/png;base64,".
                url: url.slice(0, 22) + "...",
                detail,
              },
            };
          }
          return {
            type: "image_url",
            image_url: { url, detail },
          };
        }),
      };
    }),
  };
}
|
|
2012
2072
|
|
|
2013
2073
|
// src/lib/openai/index.ts
|
|
2014
2074
|
import { OpenAI as OpenAIClient3 } from "openai";
|
|
@@ -2049,14 +2109,15 @@ function buildAnthropicMessages(childrenXml) {
|
|
|
2049
2109
|
}
|
|
2050
2110
|
for (const node of parsed.childNodes) {
|
|
2051
2111
|
if (node.nodeName === "UserMessage") {
|
|
2052
|
-
|
|
2112
|
+
const childNodes = collapseTextNodes(node.childNodes);
|
|
2113
|
+
if (childNodes.length === 1 && childNodes[0].nodeName === "#text") {
|
|
2053
2114
|
messages.push({
|
|
2054
|
-
content:
|
|
2115
|
+
content: childNodes[0].value,
|
|
2055
2116
|
role: "user"
|
|
2056
2117
|
});
|
|
2057
2118
|
continue;
|
|
2058
2119
|
}
|
|
2059
|
-
const parts =
|
|
2120
|
+
const parts = childNodes.map((n) => {
|
|
2060
2121
|
if (n.nodeName === "#text") {
|
|
2061
2122
|
return {
|
|
2062
2123
|
type: "text",
|
|
@@ -2076,7 +2137,7 @@ function buildAnthropicMessages(childrenXml) {
|
|
|
2076
2137
|
throw new Error(
|
|
2077
2138
|
"Invalid ChatCompletionContentPart, expecting text or ContentTypeImage"
|
|
2078
2139
|
);
|
|
2079
|
-
});
|
|
2140
|
+
}).filter((n) => n.type !== "text" || n.text.trim().length > 0);
|
|
2080
2141
|
messages.push({
|
|
2081
2142
|
content: parts,
|
|
2082
2143
|
role: "user"
|
|
@@ -2125,13 +2186,16 @@ function AnthropicChatCompletion(props, { render, logger, tracer, getContext })
|
|
|
2125
2186
|
temperature: props.temperature,
|
|
2126
2187
|
model: props.model
|
|
2127
2188
|
};
|
|
2189
|
+
const chatCompletionRequestToLog = cleanChatCompletionRequest2(
|
|
2190
|
+
anthropicCompletionRequest
|
|
2191
|
+
);
|
|
2128
2192
|
const logRequestData = {
|
|
2129
2193
|
startTime,
|
|
2130
2194
|
model: props.model,
|
|
2131
2195
|
provider,
|
|
2132
2196
|
providerRegion,
|
|
2133
2197
|
inputMessages,
|
|
2134
|
-
request:
|
|
2198
|
+
request: chatCompletionRequestToLog
|
|
2135
2199
|
};
|
|
2136
2200
|
logger.chatCompletionRequest("anthropic", logRequestData);
|
|
2137
2201
|
span.setAttributes({
|
|
@@ -2139,7 +2203,7 @@ function AnthropicChatCompletion(props, { render, logger, tracer, getContext })
|
|
|
2139
2203
|
provider,
|
|
2140
2204
|
providerRegion,
|
|
2141
2205
|
requestType: "anthropic",
|
|
2142
|
-
chatCompletionRequest:
|
|
2206
|
+
chatCompletionRequest: chatCompletionRequestToLog
|
|
2143
2207
|
});
|
|
2144
2208
|
let response;
|
|
2145
2209
|
try {
|
|
@@ -2255,6 +2319,39 @@ var renderChatMessageContent2 = (content) => {
|
|
|
2255
2319
|
throw new Error("Invalid ImageBlockParam type");
|
|
2256
2320
|
}).join("\n\n");
|
|
2257
2321
|
};
|
|
2322
|
+
// Produce a copy of an Anthropic messages request that is safe to log:
// base64 image payloads in user-message content blocks have their `data`
// truncated to a 22-character prefix so logs and span attributes do not
// include full image bytes. The input request is never mutated; non-user
// messages, plain-string content, text blocks, and blocks without a base64
// `source.data` string pass through unchanged.
function cleanChatCompletionRequest2(chatCompletionRequest) {
  const { messages, ...rest } = chatCompletionRequest;
  return {
    ...rest,
    messages: messages.map((message) => {
      // Only user messages can carry multi-part (image) content.
      if (message.role !== "user" || typeof message.content === "string") {
        return message;
      }
      return {
        ...message,
        content: message.content.map((part) => {
          if (typeof part === "string" || part.type === "text") {
            return part;
          }
          // Guards: `typeof null === "object"`, so the old bare typeof check
          // passed for `source: null` and then crashed on `.data`; likewise a
          // URL-type source has no `data` string to slice.
          if (
            "source" in part &&
            part.source !== null &&
            typeof part.source === "object" &&
            typeof part.source.data === "string"
          ) {
            return {
              ...part,
              source: {
                ...part.source,
                // Keep only a short prefix of the base64 payload.
                data: part.source.data.slice(0, 22) + "...",
              },
            };
          }
          return part;
        }),
      };
    }),
  };
}
|
|
2258
2355
|
|
|
2259
2356
|
// src/lib/anthropic/ClaudeImageBlock.tsx
|
|
2260
2357
|
var ClaudeImageBlockParam = (_props) => {
|