@gammatech/aijsx 0.1.1 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-7GA5BUUP.mjs +46 -0
- package/dist/createElement-Q_LxUYf8.d.mts +159 -0
- package/dist/index.d.mts +72 -0
- package/dist/index.mjs +771 -0
- package/dist/jsx-dev-runtime.d.mts +2 -0
- package/dist/jsx-dev-runtime.d.ts +2 -0
- package/dist/jsx-dev-runtime.js +63 -0
- package/dist/jsx-dev-runtime.mjs +12 -0
- package/dist/jsx-runtime.d.mts +29 -0
- package/dist/jsx-runtime.mjs +12 -0
- package/package.json +4 -6
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
// src/createElement.ts
|
|
2
|
+
// Builds an AI.JSX element: a plain object pairing a component `tag` with its
// merged props and a deferred `render` thunk that invokes the tag later.
// A single child is stored as-is under `children`; multiple children as an array.
function createAIElement(tag, props, ...children) {
  const mergedProps = { ...(props ?? {}) };
  if (children.length > 0) {
    mergedProps.children = children.length === 1 ? children[0] : children;
  }
  return {
    tag,
    props: mergedProps,
    render: (ctx) => tag(mergedProps, ctx)
  };
}
|
|
16
|
+
// An AI element is any non-null object carrying a `tag` property.
function isAIElement(value) {
  if (value === null || typeof value !== "object") {
    return false;
  }
  return "tag" in value;
}
|
|
19
|
+
// Literals are the renderable leaf values: strings, numbers, booleans,
// undefined, and null.
function isLiteral(value) {
  switch (typeof value) {
    case "string":
    case "number":
    case "boolean":
    case "undefined":
      return true;
    default:
      // `== null` matches null (undefined is already handled above).
      return value == null;
  }
}
|
|
23
|
+
// Fragment component: renders to its children with no wrapper of its own.
function AIFragment(props) {
  return props.children;
}
|
|
26
|
+
|
|
27
|
+
// src/jsx-runtime.ts
|
|
28
|
+
// Automatic JSX runtime entry point. Folds the optional key into the config
// and re-spreads array children so createAIElement normalizes them uniformly.
function jsx(type, config, maybeKey) {
  let props = config;
  if (maybeKey !== undefined) {
    props = { ...config, key: maybeKey };
  }
  const arrayChildren = config && Array.isArray(config.children) ? config.children : [];
  return createAIElement(type, props, ...arrayChildren);
}
|
|
33
|
+
var jsxDEV = jsx;
|
|
34
|
+
var jsxs = jsx;
|
|
35
|
+
var Fragment = AIFragment;
|
|
36
|
+
|
|
37
|
+
export {
|
|
38
|
+
createAIElement,
|
|
39
|
+
isAIElement,
|
|
40
|
+
isLiteral,
|
|
41
|
+
AIFragment,
|
|
42
|
+
jsx,
|
|
43
|
+
jsxDEV,
|
|
44
|
+
jsxs,
|
|
45
|
+
Fragment
|
|
46
|
+
};
|
|
@@ -0,0 +1,159 @@
|
|
|
1
|
+
type Literal = string | number | null | undefined | boolean;
|
|
2
|
+
interface RenderableStream {
|
|
3
|
+
[Symbol.asyncIterator]: () => AsyncGenerator<string, void, unknown>;
|
|
4
|
+
}
|
|
5
|
+
interface RenderResult extends RenderableStream {
|
|
6
|
+
then: (onResolved: (value: string) => void, onRejected?: (reason?: any) => void) => void;
|
|
7
|
+
}
|
|
8
|
+
interface Context<T> {
|
|
9
|
+
Provider: AIComponent<{
|
|
10
|
+
children: AINode;
|
|
11
|
+
value: T;
|
|
12
|
+
}>;
|
|
13
|
+
defaultValue: T;
|
|
14
|
+
key: symbol;
|
|
15
|
+
}
|
|
16
|
+
type AIComponent<P> = (props: P, context: RenderContext) => Renderable;
|
|
17
|
+
declare const attachedContextSymbol: unique symbol;
|
|
18
|
+
interface AIElement<P> {
|
|
19
|
+
/** The tag associated with this {@link AIElement}. */
|
|
20
|
+
tag: AIComponent<P>;
|
|
21
|
+
/** The component properties. */
|
|
22
|
+
props: P;
|
|
23
|
+
/** A function that renders this {@link AIElement} to a {@link Renderable}. */
|
|
24
|
+
render: (ctx: RenderContext) => Renderable;
|
|
25
|
+
/** The {@link RenderContext} associated with this {@link Element}. */
|
|
26
|
+
[attachedContextSymbol]?: Record<symbol, any>;
|
|
27
|
+
}
|
|
28
|
+
type AINode = Literal | AIElement<any> | AINode[];
|
|
29
|
+
type Renderable = AINode | PromiseLike<Renderable> | RenderableStream;
|
|
30
|
+
type PropsOfAIComponent<T extends AIComponent<any>> = T extends AIComponent<infer P> ? P : never;
|
|
31
|
+
|
|
32
|
+
declare const LoggerContext: Context<LogImplementation>;
|
|
33
|
+
interface RenderContext {
|
|
34
|
+
parentContext: RenderContext | null;
|
|
35
|
+
element: AIElement<any>;
|
|
36
|
+
renderId: string;
|
|
37
|
+
logger: Logger;
|
|
38
|
+
getContext<T>(context: Context<T>): T;
|
|
39
|
+
render(renderable: Renderable): RenderResult;
|
|
40
|
+
}
|
|
41
|
+
declare function createContext<T>(defaultValue: T): Context<T>;
|
|
42
|
+
|
|
43
|
+
/**
|
|
44
|
+
* This can be extended using declare module to add additional providers.
|
|
45
|
+
*/
|
|
46
|
+
interface ChatCompletionRequestPayloads {
|
|
47
|
+
}
|
|
48
|
+
interface LogChatCompletionRequest<R extends Record<string, any> = ChatCompletionRequestPayloads[keyof ChatCompletionRequestPayloads]> {
|
|
49
|
+
startTime: number;
|
|
50
|
+
model: string;
|
|
51
|
+
providerRegion?: string;
|
|
52
|
+
provider?: string;
|
|
53
|
+
inputMessages: RenderedConversationMessage[];
|
|
54
|
+
request: R;
|
|
55
|
+
}
|
|
56
|
+
interface LogChatCompletionResponse<R extends Record<string, any> = ChatCompletionRequestPayloads[keyof ChatCompletionRequestPayloads]> extends LogChatCompletionRequest<R> {
|
|
57
|
+
latency: number;
|
|
58
|
+
outputMessage: RenderedConversationMessage;
|
|
59
|
+
finishReason: string;
|
|
60
|
+
tokensUsed: {
|
|
61
|
+
prompt: number;
|
|
62
|
+
completion: number;
|
|
63
|
+
total: number;
|
|
64
|
+
};
|
|
65
|
+
}
|
|
66
|
+
type LogLevel = 'error' | 'warn' | 'info' | 'debug';
|
|
67
|
+
type Loggable = string | number | boolean | undefined | null | object;
|
|
68
|
+
type Logger = {
|
|
69
|
+
error: (...msg: Loggable[]) => void;
|
|
70
|
+
warn: (...msg: Loggable[]) => void;
|
|
71
|
+
info: (...msg: Loggable[]) => void;
|
|
72
|
+
debug: (...msg: Loggable[]) => void;
|
|
73
|
+
logException: (exception: unknown) => void;
|
|
74
|
+
chatCompletionRequest: <K extends keyof ChatCompletionRequestPayloads>(provider: K, payload: LogChatCompletionRequest<ChatCompletionRequestPayloads[K]>) => void;
|
|
75
|
+
chatCompletionResponse: <K extends keyof ChatCompletionRequestPayloads>(provider: K, payload: LogChatCompletionResponse<ChatCompletionRequestPayloads[K]>) => void;
|
|
76
|
+
};
|
|
77
|
+
declare abstract class LogImplementation {
|
|
78
|
+
protected readonly loggedExceptions: WeakMap<object, boolean>;
|
|
79
|
+
/**
|
|
80
|
+
* @param ctx The current RenderContext
|
|
81
|
+
* @param level The log level, e.g. 'error', 'warn', 'info', 'debug'
|
|
82
|
+
* @param message
|
|
83
|
+
*/
|
|
84
|
+
abstract log(ctx: RenderContext, level: LogLevel, message: string): void;
|
|
85
|
+
/**
|
|
86
|
+
* Logs exceptions thrown during an element's render.
|
|
87
|
+
*/
|
|
88
|
+
logException(ctx: RenderContext, exception: unknown): void;
|
|
89
|
+
chatCompletionRequest<K extends keyof ChatCompletionRequestPayloads>(_ctx: RenderContext, _provider: K, _payload: LogChatCompletionRequest<ChatCompletionRequestPayloads[K]>): void;
|
|
90
|
+
chatCompletionResponse<K extends keyof ChatCompletionRequestPayloads>(_ctx: RenderContext, _provider: K, _payload: LogChatCompletionResponse<ChatCompletionRequestPayloads[K]>): void;
|
|
91
|
+
}
|
|
92
|
+
declare class BoundLogger implements Logger {
|
|
93
|
+
private readonly impl;
|
|
94
|
+
private readonly ctx;
|
|
95
|
+
constructor(impl: LogImplementation, ctx: RenderContext);
|
|
96
|
+
private formatMessage;
|
|
97
|
+
error: (...msgs: Loggable[]) => void;
|
|
98
|
+
warn: (...msgs: Loggable[]) => void;
|
|
99
|
+
info: (...msgs: Loggable[]) => void;
|
|
100
|
+
debug: (...msgs: Loggable[]) => void;
|
|
101
|
+
logException: (exception: unknown) => void;
|
|
102
|
+
chatCompletionRequest: <K extends keyof ChatCompletionRequestPayloads>(provider: K, payload: LogChatCompletionRequest<ChatCompletionRequestPayloads[K]>) => void;
|
|
103
|
+
chatCompletionResponse: <K extends keyof ChatCompletionRequestPayloads>(provider: K, payload: LogChatCompletionResponse<ChatCompletionRequestPayloads[K]>) => void;
|
|
104
|
+
}
|
|
105
|
+
declare class NoopLogImplementation extends LogImplementation {
|
|
106
|
+
log(_ctx: RenderContext, _level: LogLevel, _message: string): void;
|
|
107
|
+
}
|
|
108
|
+
declare class ConsoleLogger extends LogImplementation {
|
|
109
|
+
log(ctx: RenderContext, level: LogLevel, message: string): void;
|
|
110
|
+
}
|
|
111
|
+
declare class CombinedLogger extends LogImplementation {
|
|
112
|
+
private readonly loggers;
|
|
113
|
+
constructor(loggers: LogImplementation[]);
|
|
114
|
+
log(...args: Parameters<LogImplementation['log']>): void;
|
|
115
|
+
chatCompletionRequest<K extends keyof ChatCompletionRequestPayloads>(...args: Parameters<LogImplementation['chatCompletionRequest']>): void;
|
|
116
|
+
chatCompletionResponse<K extends keyof ChatCompletionRequestPayloads>(...args: Parameters<LogImplementation['chatCompletionResponse']>): void;
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
type ChatCompletionRole = 'user' | 'system' | 'assistant';
|
|
120
|
+
declare const SystemMessage: (props: {
|
|
121
|
+
children: AINode;
|
|
122
|
+
}) => AINode;
|
|
123
|
+
declare const UserMessage: (props: {
|
|
124
|
+
children: AINode;
|
|
125
|
+
}) => AINode;
|
|
126
|
+
declare const AssistantMessage: (props: {
|
|
127
|
+
children: AINode;
|
|
128
|
+
}) => AINode;
|
|
129
|
+
interface ConversationMessageType<T extends ChatCompletionRole, C extends AIComponent<any>> {
|
|
130
|
+
type: T;
|
|
131
|
+
element: AIElement<PropsOfAIComponent<C>>;
|
|
132
|
+
}
|
|
133
|
+
type ConversationMessage = ConversationMessageType<'user', typeof UserMessage> | ConversationMessageType<'assistant', typeof AssistantMessage> | ConversationMessageType<'system', typeof SystemMessage>;
|
|
134
|
+
type RenderedConversationMessage = ConversationMessage & {
|
|
135
|
+
content: string;
|
|
136
|
+
tokens: number;
|
|
137
|
+
};
|
|
138
|
+
declare const childrenToConversationMessage: (c: AIElement<any> | AIElement<any>[]) => ConversationMessage[];
|
|
139
|
+
declare const computeUsage: (messages: RenderedConversationMessage[]) => {
|
|
140
|
+
prompt: number;
|
|
141
|
+
completion: number;
|
|
142
|
+
total: number;
|
|
143
|
+
};
|
|
144
|
+
declare class ChatCompletionError extends Error {
|
|
145
|
+
readonly chatCompletionRequest: LogChatCompletionRequest;
|
|
146
|
+
constructor(message: string, chatCompletionRequest: LogChatCompletionRequest);
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
declare function createAIElement<P extends {
|
|
150
|
+
children: C;
|
|
151
|
+
}, C>(tag: AIComponent<P>, props: Omit<P, 'children'> | null, ...children: [C]): AIElement<P>;
|
|
152
|
+
declare function createAIElement<P extends {
|
|
153
|
+
children: C[];
|
|
154
|
+
}, C>(tag: AIComponent<P>, props: Omit<P, 'children'> | null, ...children: C[]): AIElement<P>;
|
|
155
|
+
declare function AIFragment({ children }: {
|
|
156
|
+
children: AINode;
|
|
157
|
+
}): Renderable;
|
|
158
|
+
|
|
159
|
+
export { type AIElement as A, BoundLogger as B, type Context as C, LogImplementation as L, NoopLogImplementation as N, type PropsOfAIComponent as P, type RenderContext as R, SystemMessage as S, UserMessage as U, type RenderedConversationMessage as a, AIFragment as b, createAIElement as c, LoggerContext as d, createContext as e, AssistantMessage as f, type ConversationMessage as g, childrenToConversationMessage as h, computeUsage as i, ChatCompletionError as j, type ChatCompletionRequestPayloads as k, type LogChatCompletionRequest as l, type LogChatCompletionResponse as m, type LogLevel as n, type Logger as o, ConsoleLogger as p, CombinedLogger as q, type Literal as r, type RenderableStream as s, type RenderResult as t, type AIComponent as u, attachedContextSymbol as v, type AINode as w, type Renderable as x };
|
package/dist/index.d.mts
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import { L as LogImplementation, R as RenderContext, C as Context, A as AIElement, a as RenderedConversationMessage } from './createElement-Q_LxUYf8.mjs';
|
|
2
|
+
export { u as AIComponent, b as AIFragment, w as AINode, f as AssistantMessage, B as BoundLogger, j as ChatCompletionError, k as ChatCompletionRequestPayloads, q as CombinedLogger, p as ConsoleLogger, g as ConversationMessage, r as Literal, l as LogChatCompletionRequest, m as LogChatCompletionResponse, n as LogLevel, o as Logger, d as LoggerContext, N as NoopLogImplementation, P as PropsOfAIComponent, t as RenderResult, x as Renderable, s as RenderableStream, S as SystemMessage, U as UserMessage, v as attachedContextSymbol, h as childrenToConversationMessage, i as computeUsage, c as createAIElement, e as createContext } from './createElement-Q_LxUYf8.mjs';
|
|
3
|
+
import { OpenAI } from 'openai';
|
|
4
|
+
export { OpenAI as OpenAIClient } from 'openai';
|
|
5
|
+
import AnthropicClient from '@anthropic-ai/sdk';
|
|
6
|
+
export { default as AnthropicClient } from '@anthropic-ai/sdk';
|
|
7
|
+
export { countTokens as countAnthropicTokens } from '@anthropic-ai/tokenizer';
|
|
8
|
+
|
|
9
|
+
declare function createRenderContext({ logger, rootRenderId, }?: {
|
|
10
|
+
logger?: LogImplementation;
|
|
11
|
+
rootRenderId?: string;
|
|
12
|
+
}): RenderContext;
|
|
13
|
+
|
|
14
|
+
type OpenAIChatCompletionRequest = OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming;
|
|
15
|
+
declare module '@gammatech/aijsx' {
|
|
16
|
+
interface ChatCompletionRequestPayloads {
|
|
17
|
+
openai: OpenAIChatCompletionRequest;
|
|
18
|
+
}
|
|
19
|
+
}
|
|
20
|
+
type ValidOpenAIChatModel = 'gpt-4' | 'gpt-4-0314' | 'gpt-4-0613' | 'gpt-4-32k' | 'gpt-4-32k-0314' | 'gpt-4-32k-0613' | 'gpt-4-1106-preview' | 'gpt-3.5-turbo' | 'gpt-3.5-turbo-0301' | 'gpt-3.5-turbo-0613' | 'gpt-3.5-turbo-16k' | 'gpt-3.5-turbo-16k-0613' | 'gpt-3.5-turbo-1106';
|
|
21
|
+
declare const OpenAIClientContext: Context<() => OpenAI>;
|
|
22
|
+
type OpenAIChatCompletionProps = {
|
|
23
|
+
model: ValidOpenAIChatModel;
|
|
24
|
+
maxTokens?: number;
|
|
25
|
+
temperature?: number;
|
|
26
|
+
children: AIElement<any> | AIElement<any>[];
|
|
27
|
+
provider?: string;
|
|
28
|
+
providerRegion?: string;
|
|
29
|
+
};
|
|
30
|
+
declare function OpenAIChatCompletion(props: OpenAIChatCompletionProps, { logger, render, getContext }: RenderContext): AsyncGenerator<string, void, unknown>;
|
|
31
|
+
|
|
32
|
+
declare const tokenizer: {
|
|
33
|
+
encode: (text: string) => number[];
|
|
34
|
+
decode: (tokens: number[]) => string;
|
|
35
|
+
};
|
|
36
|
+
declare function tokenLimitForChatModel(model: ValidOpenAIChatModel): number | undefined;
|
|
37
|
+
declare function tokenCountForConversationMessage(message: Pick<RenderedConversationMessage, 'type' | 'content'>): number;
|
|
38
|
+
|
|
39
|
+
type AnthropicChatCompletionRequest = AnthropicClient.CompletionCreateParams;
|
|
40
|
+
declare module '@gammatech/aijsx' {
|
|
41
|
+
interface ChatCompletionRequestPayloads {
|
|
42
|
+
anthropic: AnthropicChatCompletionRequest;
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
/**
|
|
46
|
+
* The set of valid Claude models.
|
|
47
|
+
* @see https://docs.anthropic.com/claude/reference/selecting-a-model
|
|
48
|
+
*/
|
|
49
|
+
type ValidAnthropicChatModel = 'claude-instant-1.2' | 'claude-2.1';
|
|
50
|
+
declare const AnthropicClientContext: Context<() => AnthropicClient>;
|
|
51
|
+
/**
|
|
52
|
+
* If you use an Anthropic model without specifying the max tokens for the completion, this value will be used as the default.
|
|
53
|
+
*/
|
|
54
|
+
declare const defaultMaxTokens = 4096;
|
|
55
|
+
type AnthropicChatCompletionProps = {
|
|
56
|
+
model: ValidAnthropicChatModel;
|
|
57
|
+
maxTokens?: number;
|
|
58
|
+
temperature?: number;
|
|
59
|
+
children: AIElement<any> | AIElement<any>[];
|
|
60
|
+
provider?: string;
|
|
61
|
+
providerRegion?: string;
|
|
62
|
+
};
|
|
63
|
+
/**
|
|
64
|
+
* An AI.JSX component that invokes an Anthropic Large Language Model.
|
|
65
|
+
* @param children The children to render.
|
|
66
|
+
* @param chatModel The chat model to use.
|
|
67
|
+
* @param completionModel The completion model to use.
|
|
68
|
+
* @param client The Anthropic client.
|
|
69
|
+
*/
|
|
70
|
+
declare function AnthropicChatCompletion(props: AnthropicChatCompletionProps, { render, logger, getContext }: RenderContext): AsyncGenerator<string, void, unknown>;
|
|
71
|
+
|
|
72
|
+
export { AIElement, AnthropicChatCompletion, type AnthropicChatCompletionRequest, AnthropicClientContext, Context, LogImplementation, OpenAIChatCompletion, type OpenAIChatCompletionRequest, OpenAIClientContext, RenderContext, RenderedConversationMessage, type ValidAnthropicChatModel, type ValidOpenAIChatModel, createRenderContext, defaultMaxTokens, tokenCountForConversationMessage, tokenLimitForChatModel, tokenizer };
|
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,771 @@
|
|
|
1
|
+
import {
|
|
2
|
+
AIFragment,
|
|
3
|
+
createAIElement,
|
|
4
|
+
isAIElement,
|
|
5
|
+
isLiteral,
|
|
6
|
+
jsx,
|
|
7
|
+
jsxs
|
|
8
|
+
} from "./chunk-7GA5BUUP.mjs";
|
|
9
|
+
|
|
10
|
+
// src/chat.tsx
|
|
11
|
+
// Marker component for system-role chat messages; renders to its children.
var SystemMessage = (props) => props.children;
|
|
14
|
+
// Marker component for user-role chat messages; renders to its children.
var UserMessage = (props) => props.children;
|
|
17
|
+
// Marker component for assistant-role chat messages; renders to its children.
var AssistantMessage = (props) => props.children;
|
|
20
|
+
// Classifies message elements into { type, element } conversation entries by
// inspecting the component's function name.
// NOTE(review): relies on Function.name surviving the build — confirm the
// bundler does not minify/rename these components.
var childrenToConversationMessage = (c) => {
  const children = Array.isArray(c) ? c : [c];
  const roleByTagName = {
    UserMessage: "user",
    SystemMessage: "system",
    AssistantMessage: "assistant"
  };
  return children.map((child) => {
    const type = roleByTagName[child.tag.name];
    if (type === undefined) {
      // Fixed: this was reported as "OpenAI: unknown message type" even though
      // this helper is also used by the Anthropic provider, and it never said
      // which tag was rejected.
      throw new Error(`Unknown conversation message type: <${child.tag.name}>`);
    }
    return { type, element: child };
  });
};
|
|
43
|
+
// Sums token counts: user+system messages count toward the prompt,
// assistant messages toward the completion.
var computeUsage = (messages) => {
  let prompt = 0;
  let completion = 0;
  for (const message of messages) {
    if (message.type === "assistant") {
      completion += message.tokens;
    } else if (message.type === "user" || message.type === "system") {
      prompt += message.tokens;
    }
  }
  return {
    prompt,
    completion,
    total: prompt + completion
  };
};
|
|
52
|
+
// Error raised by chat-completion providers; carries the originating request
// so callers can inspect model/messages when handling the failure.
var ChatCompletionError = class extends Error {
  constructor(message, request) {
    super(message);
    this.chatCompletionRequest = request;
  }
};
|
|
58
|
+
|
|
59
|
+
// src/EventEmitter.ts
|
|
60
|
+
// Minimal pub/sub bus. Handlers are stored per event key; other emitters can
// be attached via pipe() and receive every event re-emitted.
var EventEmitter = class {
  listeners = {};
  piped = [];
  // Subscribes `handler` to `key`; returns an unsubscribe thunk.
  on(key, handler) {
    if (!this.listeners[key]) {
      this.listeners[key] = [];
    }
    this.listeners[key].push(handler);
    return () => {
      const at = this.listeners[key].indexOf(handler);
      if (at !== -1) {
        this.listeners[key].splice(at, 1);
      }
    };
  }
  // Removes one handler, or every handler for `key` when none is given.
  off(key, handler) {
    if (typeof handler !== "function") {
      this.listeners[key] = [];
      return;
    }
    const at = this.listeners[key].indexOf(handler);
    if (at !== -1) {
      this.listeners[key].splice(at, 1);
    }
  }
  // Invokes local handlers for `key`, then forwards to every piped emitter.
  emit(key, payload) {
    for (const fn of this.listeners[key] || []) {
      fn(payload);
    }
    for (const downstream of this.piped) {
      downstream.emit(key, payload);
    }
  }
  // Forwards all future emits to `emitter`; returns a detach thunk.
  pipe(emitter) {
    this.piped.push(emitter);
    return () => {
      const at = this.piped.indexOf(emitter);
      if (at !== -1) {
        this.piped.splice(at, 1);
      }
    };
  }
};
|
|
100
|
+
|
|
101
|
+
// src/log.ts
|
|
102
|
+
// Base class for log sinks. Subclasses implement log(); exception logging and
// default no-op chat-completion hooks live here.
var LogImplementation = class {
  // Tracks object exceptions already reported so a re-thrown error that
  // bubbles through several render frames is logged only once.
  loggedExceptions = /* @__PURE__ */ new WeakMap();
  // Logs exceptions thrown during an element's render, deduplicating by
  // object identity (primitive exceptions are always logged).
  logException(ctx, exception) {
    const dedupable = typeof exception === "object" && exception !== null;
    if (dedupable) {
      if (this.loggedExceptions.has(exception)) {
        return;
      }
      this.loggedExceptions.set(exception, true);
    }
    this.log(
      ctx,
      "error",
      `Rendering element <${ctx.element.tag.name}> failed with exception: ${exception}`
    );
  }
  // Chat-completion hooks are no-ops by default; subclasses may override.
  chatCompletionRequest(_ctx, _provider, _payload) {
  }
  chatCompletionResponse(_ctx, _provider, _payload) {
  }
};
|
|
126
|
+
// Binds a LogImplementation to a fixed RenderContext, exposing the
// Logger convenience API (error/warn/info/debug/...).
var BoundLogger = class {
  constructor(impl, ctx) {
    this.impl = impl;
    this.ctx = ctx;
  }
  // Joins mixed loggables into one space-separated string; objects are
  // JSON-stringified, primitives spelled out.
  formatMessage = (...msgs) => msgs.map((item) => {
    switch (typeof item) {
      case "string":
        return item;
      case "number":
        return item.toString();
      case "boolean":
        return item ? "true" : "false";
      case "undefined":
        return "undefined";
      default:
        return item === null ? "null" : JSON.stringify(item);
    }
  }).join(" ");
  error = (...msgs) => this.impl.log(this.ctx, "error", this.formatMessage(...msgs));
  warn = (...msgs) => this.impl.log(this.ctx, "warn", this.formatMessage(...msgs));
  info = (...msgs) => this.impl.log(this.ctx, "info", this.formatMessage(...msgs));
  debug = (...msgs) => this.impl.log(this.ctx, "debug", this.formatMessage(...msgs));
  logException = (exception) => this.impl.logException(this.ctx, exception);
  chatCompletionRequest = (provider, payload) => this.impl.chatCompletionRequest(this.ctx, provider, payload);
  chatCompletionResponse = (provider, payload) => this.impl.chatCompletionResponse(this.ctx, provider, payload);
};
|
|
158
|
+
// Log sink that discards all output; the default when no logger is supplied.
var NoopLogImplementation = class extends LogImplementation {
  log(_ctx, _level, _message) {
    // intentionally empty
  }
};
|
|
162
|
+
// Log sink that writes formatted lines to the console.
var ConsoleLogger = class extends LogImplementation {
  log(ctx, level, message) {
    const line = `[${level}] <${ctx.element.tag.name}> id=${ctx.renderId} ${message}`;
    console.log(line);
  }
};
|
|
169
|
+
// Log sink that fans every call out to a list of underlying implementations.
var CombinedLogger = class extends LogImplementation {
  constructor(loggers) {
    super();
    this.loggers = loggers;
  }
  log(...args) {
    for (const sink of this.loggers) {
      sink.log(...args);
    }
  }
  chatCompletionRequest(...args) {
    for (const sink of this.loggers) {
      sink.chatCompletionRequest(...args);
    }
  }
  chatCompletionResponse(...args) {
    for (const sink of this.loggers) {
      sink.chatCompletionResponse(...args);
    }
  }
};
|
|
184
|
+
|
|
185
|
+
// src/utils.ts
|
|
186
|
+
import { nanoid } from "nanoid";
|
|
187
|
+
// Returns a fresh random id for render tracking.
// NOTE(review): despite the name this produces a nanoid, not an RFC 4122
// UUID — confirm callers only need uniqueness, not UUID format.
function uuidv4() {
  return nanoid();
}
|
|
190
|
+
function getEnvVar(name, shouldThrow = true) {
|
|
191
|
+
let env = globalThis.process?.env ?? void 0;
|
|
192
|
+
if (env === void 0) {
|
|
193
|
+
try {
|
|
194
|
+
env = process.env;
|
|
195
|
+
} catch {
|
|
196
|
+
}
|
|
197
|
+
}
|
|
198
|
+
const result = env?.[name];
|
|
199
|
+
if (result === void 0 && shouldThrow) {
|
|
200
|
+
throw new Error(`Please specify env var '${name}'`);
|
|
201
|
+
}
|
|
202
|
+
return result;
|
|
203
|
+
}
|
|
204
|
+
|
|
205
|
+
// src/render.ts
|
|
206
|
+
// Converts a literal to its streamed string form: strings pass through,
// numbers stringify, and booleans/undefined/null render as the empty string.
function renderLiteral(renderable) {
  switch (typeof renderable) {
    case "string":
      return renderable;
    case "number":
      return String(renderable);
    default:
      // booleans, undefined, null — and anything unexpected — render empty.
      return "";
  }
}
|
|
218
|
+
// Sentinel component used as the root element of a fresh render tree.
function Root() {
  return null;
}
|
|
221
|
+
// Builds a root RenderContext with the given log implementation (no-op by
// default) and a fresh root render id.
function createRenderContext({
  logger = new NoopLogImplementation(),
  rootRenderId = uuidv4()
} = {}) {
  const contextValues = {
    [LoggerContext.key]: logger || LoggerContext.defaultValue
  };
  return new StreamRenderContext(
    null,
    createAIElement(Root, {}),
    rootRenderId,
    contextValues
  );
}
|
|
234
|
+
|
|
235
|
+
// src/types.ts
|
|
236
|
+
// src/types.ts
// Private key under which an element stores context values attached to it.
var attachedContextSymbol = Symbol("AI.attachedContext");

// src/context.ts
// Context supplying the active LogImplementation; defaults to a no-op sink.
var LoggerContext = createContext(new NoopLogImplementation());
|
|
242
|
+
// Drains a renderable stream and concatenates every chunk into one string.
var accumResults = async (result) => {
  const chunks = [];
  for await (const chunk of result[Symbol.asyncIterator]()) {
    chunks.push(chunk);
  }
  return chunks.join("");
};
|
|
250
|
+
// Merges N concurrently-producing streams into one ordered sequence: all
// values of stream 0 first, then stream 1, and so on. Producers buffer into
// `values` as they arrive; the consumer's cursor advances through the buffers.
var ParallelStreamIterator = class extends EventEmitter {
  // values[i] buffers everything stream i has produced so far.
  values = [];
  // completedStreams[i] flips to true once stream i has finished.
  completedStreams = [];
  // [streamInd, valInd] — the next buffered value to hand to the consumer.
  cursor = [0, 0];
  constructor(size) {
    super();
    for (let i = 0; i < size; i++) {
      this.values[i] = [];
      this.completedStreams[i] = false;
    }
  }
  // Producer side: buffer a value and wake any consumer waiting on it.
  push(streamInd, value) {
    const valInd = this.values[streamInd].length;
    this.values[streamInd].push(value);
    this.emit("data", {
      streamInd,
      valInd,
      value
    });
  }
  // Producer side: mark a stream as finished.
  complete(streamInd) {
    this.completedStreams[streamInd] = true;
    this.emit("complete", {
      streamInd
    });
  }
  // Advances the cursor: next value within the same stream, or — when
  // `complete` — the first value of the following stream.
  nextCursor(complete) {
    const [streamInd, valInd] = this.cursor;
    if (!complete) {
      this.cursor = [streamInd, valInd + 1];
    } else {
      this.cursor = [streamInd + 1, 0];
    }
  }
  // returns a promise that resolves when this.values[streamInd][valInd] is available
  // Rejects with the sentinel string "next" when the stream ends before that
  // index exists, signalling next() to advance to the following stream.
  // NOTE(review): a buffered value of `undefined` is indistinguishable from
  // "not yet produced" here — presumably streams only yield strings; verify.
  resolveAt(streamInd, valInd) {
    return new Promise((resolve, reject) => {
      const value = this.values[streamInd][valInd];
      if (value !== void 0) {
        resolve({ done: false, value });
        return;
      }
      if (this.completedStreams[streamInd]) {
        // The last stream is exhausted: the merged sequence is done.
        if (streamInd === this.completedStreams.length - 1) {
          resolve({ done: true, value: void 0 });
        }
        reject("next");
        return;
      }
      const unsub = this.on("data", (data) => {
        if (streamInd === data.streamInd && data.valInd === valInd) {
          resolve({ done: false, value: data.value });
          unsub();
          onCompleteUnsub();
        }
      });
      const onCompleteUnsub = this.on("complete", (data) => {
        if (streamInd !== data.streamInd) {
          return;
        }
        if (streamInd === this.completedStreams.length - 1) {
          resolve({ done: true, value: void 0 });
        }
        if (this.values[streamInd].length === valInd) {
          reject("next");
        }
        unsub();
        onCompleteUnsub();
      });
    });
  }
  // Async-iterator protocol: fetch the value at the cursor, falling through to
  // the next stream when the current one signals exhaustion via "next".
  async next() {
    try {
      const val = await this.resolveAt(...this.cursor);
      this.nextCursor(false);
      return val;
    } catch (e) {
      if (e !== "next") {
        throw e;
      }
      this.nextCursor(true);
      const nextStreamVal = await this.resolveAt(...this.cursor);
      this.nextCursor(false);
      return nextStreamVal;
    }
  }
  [Symbol.asyncIterator]() {
    return this;
  }
};
|
|
340
|
+
// Drives all streams concurrently, feeding a ParallelStreamIterator that
// yields their output in stream order.
function coalesceParallelStreams(streams) {
  const merged = new ParallelStreamIterator(streams.length);
  streams.forEach(async (stream, streamInd) => {
    for await (const value of stream) {
      merged.push(streamInd, value);
    }
    merged.complete(streamInd);
  });
  return merged;
}
|
|
350
|
+
// The concrete RenderContext: renders a Renderable tree to an async stream of
// string chunks, threading context values and a per-element logger down the
// tree. render() returns a hybrid object that is both awaitable (full string)
// and async-iterable (incremental chunks).
var StreamRenderContext = class _StreamRenderContext {
  constructor(parentContext, element, renderId, contextValues) {
    this.parentContext = parentContext;
    this.element = element;
    this.renderId = renderId;
    this.contextValues = contextValues;
    const logImpl = this.getContext(LoggerContext);
    this.logger = new BoundLogger(logImpl, this);
    // Wraps the chunk stream in a thenable so callers can either await the
    // accumulated string or iterate partial output.
    this.render = (renderable) => {
      const generator = this.renderStream(renderable);
      const result = {
        then: (onFulfilled, onRejected) => accumResults(generator).then(onFulfilled, onRejected),
        [Symbol.asyncIterator]: () => generator
      };
      return result;
    };
    const self = this;
    this.renderStream = async function* (renderable) {
      // Literals produce a single synchronous chunk.
      if (isLiteral(renderable)) {
        yield renderLiteral(renderable);
        return;
      }
      // Elements render inside a child context carrying a fresh renderId and
      // any context values attached to the element.
      if (isAIElement(renderable)) {
        const ctxValues = attachedContextValues(renderable) || {};
        const childRenderId = uuidv4();
        const newCtx = self.enter(renderable, childRenderId, ctxValues);
        const logger = newCtx.logger;
        try {
          return yield* newCtx.render(renderable.render(newCtx));
        } catch (ex) {
          // Log at the failing element, then let the error propagate upward.
          logger.logException(ex);
          throw ex;
        }
      }
      if (Array.isArray(renderable)) {
        // Fast path: an all-literal array needs no parallel machinery.
        if (renderable.every((r) => isLiteral(r))) {
          yield renderable.map((r) => renderLiteral(r)).join("");
          return;
        }
        // Otherwise render children concurrently, but emit their chunks in
        // child order (falsy children are skipped).
        const streams = renderable.filter((a) => !!a).map((r) => self.renderStream(r));
        const result = coalesceParallelStreams(streams);
        while (true) {
          const { value, done } = await result.next();
          if (done) {
            return;
          }
          yield value;
        }
      }
      // Already an async stream: delegate to it directly.
      if (Symbol.asyncIterator in renderable) {
        return yield* renderable[Symbol.asyncIterator]();
      }
      if (!("then" in renderable)) {
        throw new Error(
          `Unexpected renderable type: ${JSON.stringify(renderable)}`
        );
      }
      // A promise of a renderable: resolve it, then render whatever it yields.
      const next = await renderable.then(
        (r) => r
      );
      return yield* self.render(next);
    };
  }
  render;
  renderStream;
  logger;
  // Reads a context value, falling back to the context's declared default.
  getContext = (context) => {
    return this.contextValues[context.key] ?? context.defaultValue;
  };
  // @internal
  // Creates the child context for rendering `element`, layering any new
  // context values over the inherited ones.
  enter(element, renderId, newCtx) {
    return new _StreamRenderContext(this, element, renderId, {
      ...this.contextValues,
      ...newCtx
    });
  }
};
|
|
427
|
+
// Pass-through component whose element carries provided context values.
function ContextValueProvider(props) {
  return props.children;
}
|
|
430
|
+
// Creates a Context: a unique key, a default value, and a Provider component
// that attaches the provided value to its subtree.
function createContext(defaultValue) {
  const key = Symbol();
  function ContextProvider(props, _compContext) {
    return withContextValues(
      createAIElement(ContextValueProvider, null, props.children),
      { [key]: props.value }
    );
  }
  return {
    Provider: ContextProvider,
    defaultValue,
    key
  };
}
|
|
446
|
+
/**
 * Passthrough component used by withContextValues to wrap non-element
 * renderables (promises, generators, ...) so context can be attached to a
 * real element; renders its children unchanged.
 */
function BoundContextValues(props) {
  return props.children;
}
|
|
449
|
+
/**
 * Attaches `additionalContext` to a renderable so descendants can resolve
 * it via getContext. Literals pass through untouched, arrays are handled
 * element-wise, and non-element renderables are wrapped in a passthrough
 * element first.
 */
function withContextValues(renderable, additionalContext) {
  // Literals (strings, numbers, booleans, null/undefined) carry no context.
  if (isLiteral(renderable)) {
    return renderable;
  }
  // Recurse into arrays so every child gets the context attached.
  if (Array.isArray(renderable)) {
    return renderable.map((child) => withContextValues(child, additionalContext));
  }
  if (isAIElement(renderable)) {
    // An element with context already attached keeps it: the innermost
    // (closest) attachment wins and is never overwritten.
    if (renderable[attachedContextSymbol]) {
      return renderable;
    }
    return {
      ...renderable,
      [attachedContextSymbol]: additionalContext
    };
  }
  // Promises, async iterables, etc.: wrap in a passthrough element so
  // there is a concrete element to attach the context to.
  return withContextValues(
    createAIElement(BoundContextValues, null, renderable),
    additionalContext
  );
}
|
|
471
|
+
/**
 * Reads the context values previously attached to an element by
 * withContextValues; undefined when nothing was attached.
 */
function attachedContextValues(element) {
  const attached = element[attachedContextSymbol];
  return attached;
}
|
|
474
|
+
|
|
475
|
+
// src/lib/openai/OpenAI.tsx
|
|
476
|
+
import { OpenAI as OpenAIClient } from "openai";
|
|
477
|
+
|
|
478
|
+
// src/lib/openai/tokenizer.ts
import { getEncoding } from "js-tiktoken";
// Module-level singleton encoder for the cl100k_base encoding (used by the
// OpenAI chat models handled below); constructed once at import time.
var cl100kTokenizer = getEncoding("cl100k_base");
// Thin facade over the encoder exposing just encode/decode.
var tokenizer = {
  encode: (text) => cl100kTokenizer.encode(text),
  decode: (tokens) => cl100kTokenizer.decode(tokens)
};
|
|
485
|
+
/**
 * Returns the usable prompt token budget for a known OpenAI chat model:
 * the model's context window minus the tokens consumed by the assistant
 * reply prefix. Returns undefined for unrecognized models.
 */
function tokenLimitForChatModel(model) {
  const TOKENS_CONSUMED_BY_REPLY_PREFIX = 3;
  // Context window size per supported model name.
  const contextWindowByModel = {
    "gpt-4": 8192,
    "gpt-4-0314": 8192,
    "gpt-4-0613": 8192,
    "gpt-4-32k": 32768,
    "gpt-4-32k-0314": 32768,
    "gpt-4-32k-0613": 32768,
    "gpt-4-1106-preview": 128e3,
    "gpt-3.5-turbo": 4096,
    "gpt-3.5-turbo-0301": 4096,
    "gpt-3.5-turbo-0613": 4096,
    "gpt-3.5-turbo-16k": 16384,
    "gpt-3.5-turbo-16k-0613": 16384,
    "gpt-3.5-turbo-1106": 16384
  };
  const contextWindow = contextWindowByModel[model];
  if (contextWindow === undefined) {
    // Unknown model: same as the original switch's default branch.
    return void 0;
  }
  return contextWindow - TOKENS_CONSUMED_BY_REPLY_PREFIX;
}
|
|
512
|
+
/**
 * Counts the tokens a conversation message contributes to an OpenAI chat
 * request: a fixed per-message overhead plus the encoded content length.
 * Returns undefined for message types other than assistant/system/user.
 */
function tokenCountForConversationMessage(message) {
  const TOKENS_PER_MESSAGE = 3;
  const countedTypes = ["assistant", "system", "user"];
  if (countedTypes.includes(message.type)) {
    return TOKENS_PER_MESSAGE + tokenizer.encode(message.content).length;
  }
}
|
|
521
|
+
|
|
522
|
+
// src/lib/openai/OpenAI.tsx
// Lazily-constructed module-level OpenAI client, shared by every consumer
// that does not override OpenAIClientContext with its own Provider.
var defaultClient = null;
// The context value is a factory (not a client) so construction — and the
// OPENAI_API_KEY lookup — is deferred until first use.
var OpenAIClientContext = createContext(() => {
  if (defaultClient) {
    return defaultClient;
  }
  // Second argument `true` — presumably "required"; getEnvVar is defined
  // elsewhere in this file. TODO(review): confirm its semantics.
  const apiKey = getEnvVar("OPENAI_API_KEY", true);
  defaultClient = new OpenAIClient({ apiKey });
  return defaultClient;
});
|
|
532
|
+
/**
 * Streaming OpenAI chat completion component.
 *
 * Renders the child message elements to strings, issues a streaming
 * chat.completions request, yields each content delta as it arrives, and
 * finally logs a response record with token usage and latency.
 *
 * Throws ChatCompletionError when the API call fails, and rethrows
 * non-Error values unchanged.
 */
async function* OpenAIChatCompletion(props, { logger, render, getContext }) {
  const startTime = performance.now();
  // The context holds a client *factory*; call it to get the client.
  const client = getContext(OpenAIClientContext)();
  if (!client) {
    // NOTE(review): message says "model" but the missing value is the
    // OpenAI client — consider rewording in the source.
    throw new Error("[OpenAI] must supply OpenAI model via context");
  }
  // Render every child message element to its string content in parallel,
  // annotating each with its token count.
  const renderedMessages = await Promise.all(
    childrenToConversationMessage(props.children).map(async (message) => {
      const partiallyRendered = {
        ...message,
        content: await render(message.element)
      };
      return {
        ...partiallyRendered,
        tokens: tokenCountForConversationMessage(partiallyRendered)
      };
    })
  );
  // Project down to the { role, content } shape the OpenAI API expects.
  const chatMessages = renderedMessages.map((m) => {
    return {
      content: m.content,
      role: m.type
    };
  });
  const chatCompletionRequest = {
    model: props.model,
    max_tokens: props.maxTokens,
    temperature: props.temperature,
    messages: chatMessages,
    stream: true
  };
  // Request metadata reused both for logging and for error context.
  const logRequestData = {
    startTime,
    model: props.model,
    provider: props.provider,
    providerRegion: props.providerRegion,
    inputMessages: renderedMessages,
    request: chatCompletionRequest
  };
  logger.chatCompletionRequest("openai", logRequestData);
  let chatResponse;
  try {
    chatResponse = await client.chat.completions.create(chatCompletionRequest);
  } catch (ex) {
    // Wrap known error shapes in ChatCompletionError carrying the request
    // metadata; anything else is rethrown as-is.
    if (ex instanceof OpenAIClient.APIError) {
      throw new ChatCompletionError(
        `OpenAIClient.APIError: ${ex.message}`,
        logRequestData
      );
    } else if (ex instanceof Error) {
      throw new ChatCompletionError(ex.message, logRequestData);
    }
    throw ex;
  }
  let finishReason = void 0;
  let content = "";
  // Stream chunks: accumulate the full content while yielding each delta.
  for await (const message of chatResponse) {
    if (!message.choices || !message.choices[0]) {
      continue;
    }
    const delta = message.choices[0].delta;
    if (message.choices[0].finish_reason) {
      finishReason = message.choices[0].finish_reason;
    }
    if (delta.content) {
      content += delta.content;
      yield delta.content;
    }
  }
  // Materialize the assistant reply as a conversation message for logging
  // and usage accounting.
  const outputMessage = {
    type: "assistant",
    element: /* @__PURE__ */ jsx(AssistantMessage, { children: content }),
    content,
    tokens: tokenCountForConversationMessage({
      type: "assistant",
      content
    })
  };
  const responseData = {
    ...logRequestData,
    finishReason,
    latency: performance.now() - startTime,
    outputMessage,
    tokensUsed: computeUsage([...renderedMessages, outputMessage])
  };
  logger.chatCompletionResponse("openai", responseData);
}
|
|
619
|
+
|
|
620
|
+
// src/lib/openai/index.ts
|
|
621
|
+
import { OpenAI as OpenAIClient2 } from "openai";
|
|
622
|
+
|
|
623
|
+
// src/lib/anthropic/Anthropic.tsx
import AnthropicClient from "@anthropic-ai/sdk";
import { countTokens } from "@anthropic-ai/tokenizer";
// Lazily-constructed module-level Anthropic client, shared by consumers
// that do not override AnthropicClientContext with their own Provider.
var defaultClient2 = null;
// The context value is a factory so client construction (and the
// ANTHROPIC_API_KEY lookup) is deferred until first use.
var AnthropicClientContext = createContext(
  () => {
    if (defaultClient2) {
      return defaultClient2;
    }
    // Second argument `false` — presumably "not required"; getEnvVar is
    // defined elsewhere in this file. TODO(review): confirm semantics.
    defaultClient2 = new AnthropicClient({
      apiKey: getEnvVar("ANTHROPIC_API_KEY", false)
    });
    return defaultClient2;
  }
);
// Default for max_tokens_to_sample when props.maxTokens is not given.
var defaultMaxTokens = 4096;
|
|
639
|
+
/**
 * Streaming Anthropic (legacy completions API) chat component.
 *
 * Converts child message elements into a HUMAN_PROMPT/AI_PROMPT prompt
 * string, issues a streaming completions request, yields text chunks as
 * they arrive, and logs a response record with token usage and latency.
 *
 * System messages are not supported by this API, so each one is rewritten
 * as a user instruction followed by a canned assistant acknowledgement.
 *
 * Throws ChatCompletionError when the API call fails, and rethrows
 * non-Error values unchanged.
 */
async function* AnthropicChatCompletion(props, { render, logger, getContext }) {
  const startTime = performance.now();
  // The context holds a client *factory*; call it to get the client.
  const client = getContext(AnthropicClientContext)();
  if (!client) {
    throw new Error(
      "[AnthropicChatCompletion] must supply AnthropicClient via context"
    );
  }
  const renderedMessages = await Promise.all(
    childrenToConversationMessage(props.children).flatMap((message) => {
      // Rewrite each system message into a user/assistant exchange since
      // the completions prompt format has no system role.
      if (message.type === "system") {
        return [
          {
            type: "user",
            element: /* @__PURE__ */ jsxs(UserMessage, { children: [
              "For subsequent replies you will adhere to the following instructions: ",
              message.element
            ] })
          },
          {
            type: "assistant",
            element: /* @__PURE__ */ jsx(AssistantMessage, { children: "Okay, I will do that." })
          }
        ];
      }
      return [message];
    }).map(async (message) => {
      // Prefix each rendered message with the role marker the completions
      // prompt format requires.
      const prefix = message.type === "user" ? AnthropicClient.HUMAN_PROMPT : AnthropicClient.AI_PROMPT;
      const rendered = await render(message.element);
      const content2 = `${prefix} ${rendered.trim()}`;
      return {
        ...message,
        content: content2,
        tokens: countTokens(content2)
      };
    })
  );
  const chatMessages = renderedMessages.map((m) => {
    return m.content;
  });
  // Trailing AI_PROMPT tells the model it is the assistant's turn.
  chatMessages.push(AnthropicClient.AI_PROMPT);
  const anthropicCompletionRequest = {
    prompt: chatMessages.join("\n\n"),
    max_tokens_to_sample: props.maxTokens ?? defaultMaxTokens,
    temperature: props.temperature,
    model: props.model,
    stream: true
  };
  // Request metadata reused both for logging and for error context.
  const logRequestData = {
    startTime,
    model: props.model,
    provider: props.provider,
    providerRegion: props.providerRegion,
    inputMessages: renderedMessages,
    request: anthropicCompletionRequest
  };
  logger.chatCompletionRequest("anthropic", logRequestData);
  let response;
  try {
    response = await client.completions.create(anthropicCompletionRequest);
  } catch (err) {
    // Wrap known error shapes in ChatCompletionError carrying the request
    // metadata; anything else is rethrown as-is.
    if (err instanceof AnthropicClient.APIError) {
      throw new ChatCompletionError(
        `AnthropicClient.APIError: ${err.message}`,
        logRequestData
      );
    } else if (err instanceof Error) {
      throw new ChatCompletionError(err.message, logRequestData);
    }
    throw err;
  }
  let content = "";
  let isFirstResponse = true;
  for await (const completion of response) {
    let text = completion.completion;
    // The model's first chunk typically starts with a space after
    // AI_PROMPT; strip it once so the streamed output starts cleanly.
    if (isFirstResponse && text.length > 0) {
      isFirstResponse = false;
      if (text.startsWith(" ")) {
        text = text.slice(1);
      }
    }
    content += text;
    yield text;
  }
  // Materialize the assistant reply for logging and usage accounting.
  const outputMessage = {
    type: "assistant",
    element: /* @__PURE__ */ jsx(AssistantMessage, { children: content }),
    content,
    tokens: countTokens(content)
  };
  const responseData = {
    ...logRequestData,
    // The streaming loop above does not capture a stop reason, so "stop"
    // is reported unconditionally.
    finishReason: "stop",
    latency: performance.now() - startTime,
    outputMessage,
    tokensUsed: computeUsage([...renderedMessages, outputMessage])
  };
  logger.chatCompletionResponse("anthropic", responseData);
}
|
|
738
|
+
|
|
739
|
+
// src/lib/anthropic/index.ts
|
|
740
|
+
import AnthropicClient2 from "@anthropic-ai/sdk";
|
|
741
|
+
import { countTokens as countAnthropicTokens } from "@anthropic-ai/tokenizer";
|
|
742
|
+
export {
|
|
743
|
+
AIFragment,
|
|
744
|
+
AnthropicChatCompletion,
|
|
745
|
+
AnthropicClient2 as AnthropicClient,
|
|
746
|
+
AnthropicClientContext,
|
|
747
|
+
AssistantMessage,
|
|
748
|
+
BoundLogger,
|
|
749
|
+
ChatCompletionError,
|
|
750
|
+
CombinedLogger,
|
|
751
|
+
ConsoleLogger,
|
|
752
|
+
LogImplementation,
|
|
753
|
+
LoggerContext,
|
|
754
|
+
NoopLogImplementation,
|
|
755
|
+
OpenAIChatCompletion,
|
|
756
|
+
OpenAIClient2 as OpenAIClient,
|
|
757
|
+
OpenAIClientContext,
|
|
758
|
+
SystemMessage,
|
|
759
|
+
UserMessage,
|
|
760
|
+
attachedContextSymbol,
|
|
761
|
+
childrenToConversationMessage,
|
|
762
|
+
computeUsage,
|
|
763
|
+
countAnthropicTokens,
|
|
764
|
+
createAIElement,
|
|
765
|
+
createContext,
|
|
766
|
+
createRenderContext,
|
|
767
|
+
defaultMaxTokens,
|
|
768
|
+
tokenCountForConversationMessage,
|
|
769
|
+
tokenLimitForChatModel,
|
|
770
|
+
tokenizer
|
|
771
|
+
};
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
// Bundler-generated CommonJS interop helper shims. Left byte-identical:
// the inline `desc` assignment below is order-sensitive.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines each entry of `all` on `target` as an enumerable getter, giving
// consumers live bindings (exports resolve at access time, so they may be
// registered before the underlying values are defined).
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and keys `to` already has; enumerability is taken from the source
// descriptor (note `desc` is assigned inline within the expression).
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a module-exports object as a CommonJS exports object with
// __esModule set, so ESM-style importers detect it correctly.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
18
|
+
|
|
19
|
+
// src/jsx-dev-runtime.ts
// Register the runtime's exports as live getters. This runs before
// Fragment/jsx/jsxDEV/jsxs are defined below, which is safe because
// __export installs getters that resolve at first access.
var jsx_dev_runtime_exports = {};
__export(jsx_dev_runtime_exports, {
  Fragment: () => Fragment,
  jsx: () => jsx,
  jsxDEV: () => jsxDEV,
  jsxs: () => jsxs
});
module.exports = __toCommonJS(jsx_dev_runtime_exports);
|
|
28
|
+
|
|
29
|
+
// src/createElement.ts
|
|
30
|
+
function createAIElement(tag, props, ...children) {
|
|
31
|
+
const propsToPass = {
|
|
32
|
+
...props ?? {},
|
|
33
|
+
...children.length === 0 ? {} : { children: children.length === 1 ? children[0] : children }
|
|
34
|
+
};
|
|
35
|
+
const result = {
|
|
36
|
+
tag,
|
|
37
|
+
props: propsToPass,
|
|
38
|
+
render: (ctx) => {
|
|
39
|
+
return tag(propsToPass, ctx);
|
|
40
|
+
}
|
|
41
|
+
};
|
|
42
|
+
return result;
|
|
43
|
+
}
|
|
44
|
+
/**
 * Fragment component: renders its children with no wrapping element.
 */
function AIFragment(props) {
  return props.children;
}
|
|
47
|
+
|
|
48
|
+
// src/jsx-runtime.ts
|
|
49
|
+
/**
 * JSX runtime entry point the transpiler emits calls to. Folds an optional
 * key into the config and forwards to createAIElement, passing array
 * children through as rest arguments.
 */
function jsx(type, config, maybeKey) {
  let configWithKey = config;
  if (maybeKey !== void 0) {
    configWithKey = { ...config, key: maybeKey };
  }
  // Only array children are spread; a single child stays inside config
  // and reaches the element via the props spread in createAIElement.
  const childArgs = config && Array.isArray(config.children) ? config.children : [];
  return createAIElement(type, configWithKey, ...childArgs);
}
|
|
54
|
+
// jsxDEV (dev transform) and jsxs (static-children transform) share the
// same implementation as jsx; Fragment maps to the passthrough AIFragment.
var jsxDEV = jsx;
var jsxs = jsx;
var Fragment = AIFragment;
// Annotate the CommonJS export names for ESM import in node:
// (dead `0 &&` expression — a bundler hint only, never executed)
0 && (module.exports = {
  Fragment,
  jsx,
  jsxDEV,
  jsxs
});
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import { u as AIComponent, A as AIElement, b as AIFragment } from './createElement-Q_LxUYf8.mjs';

/**
 * This module is used as the JSX import source for TS/JS files: it declares
 * the JSX transpile functions (jsx/jsxs/jsxDEV) and the JSX namespace the
 * type checker uses to validate JSX expressions.
 */

/** @hidden */
declare namespace JSX {
  // Any AI component function may appear as a JSX tag.
  type ElementType = AIComponent<any>;
  interface Element extends AIElement<any> {
  }
  // No intrinsic (lowercase/host) elements are declared.
  interface IntrinsicElements {
  }
  // `children` is the prop that receives JSX child expressions.
  interface ElementChildrenAttribute {
    children: {};
  }
}
/** @hidden */
declare function jsx(type: any, config: any, maybeKey?: any): AIElement<{
    children: any[];
}>;
/** @hidden */
declare const jsxDEV: typeof jsx;
/** @hidden */
declare const jsxs: typeof jsx;
/** @hidden */
declare const Fragment: typeof AIFragment;

export { Fragment, JSX, jsx, jsxDEV, jsxs };
|
package/package.json
CHANGED
|
@@ -1,16 +1,17 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@gammatech/aijsx",
|
|
3
|
-
"version": "0.1.
|
|
3
|
+
"version": "0.1.2",
|
|
4
4
|
"description": "Rewrite of aijsx",
|
|
5
5
|
"author": "Jordan Garcia",
|
|
6
6
|
"license": "MIT",
|
|
7
7
|
"main": "./dist/index.js",
|
|
8
|
+
"module": "./dist/index.mjs",
|
|
8
9
|
"types": "./dist/index.d.ts",
|
|
9
10
|
"scripts": {
|
|
10
11
|
"dev": "ts-node ./src/test.tsx",
|
|
11
12
|
"test": "jest --verbose",
|
|
12
13
|
"test:watch": "jest --watch --verbose",
|
|
13
|
-
"build": "yarn check-types && yarn clean-symlinks && tsup
|
|
14
|
+
"build": "yarn check-types && yarn clean-symlinks && tsup",
|
|
14
15
|
"clean-symlinks": "rm ./jsx-* || true",
|
|
15
16
|
"symlink": "ln -s ./dist/jsx-runtime.js . && ln -s ./dist/jsx-runtime.d.ts && ln -s ./dist/jsx-runtime.js ./jsx-dev-runtime.js && ln -s ./dist/jsx-runtime.d.ts ./jsx-dev-runtime.d.ts",
|
|
16
17
|
"prepublishOnly": "yarn build",
|
|
@@ -50,10 +51,7 @@
|
|
|
50
51
|
"files": [
|
|
51
52
|
"dist",
|
|
52
53
|
"README.md",
|
|
53
|
-
"
|
|
54
|
-
"jsx-runtime.ts",
|
|
55
|
-
"jsx-dev-runtime.d.ts",
|
|
56
|
-
"jsx-dev-runtime.ts"
|
|
54
|
+
"LICENSE"
|
|
57
55
|
],
|
|
58
56
|
"jest": {
|
|
59
57
|
"moduleFileExtensions": [
|