@gammatech/aijsx 0.1.3 → 0.2.0-beta.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -14,7 +14,7 @@ function createAIElement(tag, props, ...children) {
  return result;
  }
  function isAIElement(value) {
- return value !== null && typeof value === "object" && "tag" in value;
+ return value !== null && typeof value === "object" && "tag" in value && "render" in value;
  }
  function isLiteral(value) {
  return typeof value === "string" || typeof value === "number" || typeof value === "undefined" || typeof value === "boolean" || // capture null + undefined
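
The `isAIElement` guard is tightened in 0.2.0-beta.2: a value must now expose a `render` member in addition to `tag`. A minimal sketch of the difference, assuming `isAIElement` is reachable from the package root (the typings below only show it exported from the internal chunk) and that elements built by `createAIElement` carry a `render` method, which the new check implies:

```ts
import { createAIElement, isAIElement } from "@gammatech/aijsx";

// A trivial component used only for illustration: it echoes its children.
const Echo = ({ children }: { children: unknown }) => children;

const element = createAIElement(Echo, null, "hello");
console.log(isAIElement(element)); // true: real elements carry both `tag` and `render`

// Passed the 0.1.3 check (only `tag` was required) but fails under 0.2.0-beta.2.
console.log(isAIElement({ tag: Echo }));
```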
@@ -24,23 +24,9 @@ function AIFragment({ children }) {
  return children;
  }

- // src/jsx-runtime.ts
- function jsx(type, config, maybeKey) {
- const configWithKey = maybeKey !== void 0 ? { ...config, key: maybeKey } : config;
- const children = config && Array.isArray(config.children) ? config.children : [];
- return createAIElement(type, configWithKey, ...children);
- }
- var jsxDEV = jsx;
- var jsxs = jsx;
- var Fragment = AIFragment;
-
  export {
  createAIElement,
  isAIElement,
  isLiteral,
- AIFragment,
- jsx,
- jsxDEV,
- jsxs,
- Fragment
+ AIFragment
  };
@@ -0,0 +1,21 @@
+ import {
+ AIFragment,
+ createAIElement
+ } from "./chunk-UMN5F5A5.mjs";
+
+ // src/jsx-runtime.ts
+ function jsx(type, config, maybeKey) {
+ const configWithKey = maybeKey !== void 0 ? { ...config, key: maybeKey } : config;
+ const children = config && Array.isArray(config.children) ? config.children : [];
+ return createAIElement(type, configWithKey, ...children);
+ }
+ var jsxDEV = jsx;
+ var jsxs = jsx;
+ var Fragment = AIFragment;
+
+ export {
+ jsx,
+ jsxDEV,
+ jsxs,
+ Fragment
+ };
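
The `jsx`/`jsxs`/`jsxDEV`/`Fragment` bindings removed from the shared chunk above now live in this dedicated module, which is the shape TypeScript's automatic JSX runtime expects. A hypothetical consumer setup, assuming the package maps this file to an `@gammatech/aijsx/jsx-runtime` export (the diff shows only the emitted chunk, not the package's exports map):

```ts
// With { "jsx": "react-jsx", "jsxImportSource": "@gammatech/aijsx" } in tsconfig.json,
// the compiler emits calls like the ones below; they can also be invoked by hand.
import { jsx, jsxs, Fragment } from "@gammatech/aijsx/jsx-runtime";

// Roughly what <>Hello</> compiles to under the automatic runtime.
const single = jsx(Fragment, { children: "Hello" });

// jsxs is chosen by the compiler for static child arrays; here it is the same factory.
const several = jsxs(Fragment, { children: ["a", "b", "c"] });
```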
@@ -30,13 +30,21 @@ type Renderable = AINode | PromiseLike<Renderable> | RenderableStream;
  type PropsOfAIComponent<T extends AIComponent<any>> = T extends AIComponent<infer P> ? P : never;

  declare const LoggerContext: Context<LogImplementation>;
+ type RenderOptions = {
+ preserveTags?: boolean;
+ renderedProps?: {
+ [tagName: string]: {
+ [propName: string]: boolean;
+ };
+ };
+ };
  interface RenderContext {
  parentContext: RenderContext | null;
  element: AIElement<any>;
  renderId: string;
  logger: Logger;
  getContext<T>(context: Context<T>): T;
- render(renderable: Renderable): RenderResult;
+ render(renderable: Renderable, opts?: RenderOptions): RenderResult;
  }
  declare function createContext<T>(defaultValue: T): Context<T>;

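
`render()` on `RenderContext` now accepts an optional `RenderOptions` bag. The option names come straight from the type above, but this diff does not document their semantics; the sketch below assumes `preserveTags` keeps matching elements as tags in the output and `renderedProps` whitelists which props are emitted per tag, and it treats the returned `RenderResult` as awaitable to a string:

```ts
import type { AINode, RenderContext } from "@gammatech/aijsx";

// Hypothetical intermediate component that re-renders its children with options.
async function* PreserveImages(
  { children }: { children: AINode },
  { render }: RenderContext
): AsyncGenerator<string, void, unknown> {
  const result = await render(children, {
    preserveTags: true,
    renderedProps: { ContentTypeImage: { url: true } },
  });
  yield String(result);
}
```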
@@ -126,16 +134,11 @@ declare const UserMessage: (props: {
  declare const AssistantMessage: (props: {
  children: AINode;
  }) => AINode;
- interface ConversationMessageType<T extends ChatCompletionRole, C extends AIComponent<any>> {
- type: T;
- element: AIElement<PropsOfAIComponent<C>>;
- }
- type ConversationMessage = ConversationMessageType<'user', typeof UserMessage> | ConversationMessageType<'assistant', typeof AssistantMessage> | ConversationMessageType<'system', typeof SystemMessage>;
- type RenderedConversationMessage = ConversationMessage & {
+ type RenderedConversationMessage = {
+ role: ChatCompletionRole;
  content: string;
  tokens: number;
  };
- declare const childrenToConversationMessage: (c: AIElement<any> | AIElement<any>[]) => ConversationMessage[];
  declare const computeUsage: (messages: RenderedConversationMessage[]) => {
  prompt: number;
  completion: number;
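
`ConversationMessage` and `childrenToConversationMessage` are gone; a rendered message is now a flat `{ role, content, tokens }` record keyed by `ChatCompletionRole` instead of wrapping the originating element. A sketch of feeding such records to `computeUsage` (the token counts are made-up illustration values; only the `prompt`/`completion` fields visible in this hunk are read):

```ts
import { computeUsage, type RenderedConversationMessage } from "@gammatech/aijsx";

const messages: RenderedConversationMessage[] = [
  { role: "system", content: "You are terse.", tokens: 5 },
  { role: "user", content: "Summarize this release.", tokens: 6 },
  { role: "assistant", content: "jsx runtime split out; vision support added.", tokens: 11 },
];

const usage = computeUsage(messages);
console.log(usage.prompt, usage.completion);
```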
@@ -156,4 +159,4 @@ declare function AIFragment({ children }: {
  children: AINode;
  }): Renderable;

- export { type AIElement as A, BoundLogger as B, type Context as C, LogImplementation as L, NoopLogImplementation as N, type PropsOfAIComponent as P, type RenderContext as R, SystemMessage as S, UserMessage as U, type RenderedConversationMessage as a, AIFragment as b, createAIElement as c, LoggerContext as d, createContext as e, AssistantMessage as f, type ConversationMessage as g, childrenToConversationMessage as h, computeUsage as i, ChatCompletionError as j, type ChatCompletionRequestPayloads as k, type LogChatCompletionRequest as l, type LogChatCompletionResponse as m, type LogLevel as n, type Logger as o, ConsoleLogger as p, CombinedLogger as q, type Literal as r, type RenderableStream as s, type RenderResult as t, type AIComponent as u, attachedContextSymbol as v, type AINode as w, type Renderable as x };
+ export { type AINode as A, BoundLogger as B, type Context as C, LogImplementation as L, NoopLogImplementation as N, type PropsOfAIComponent as P, type RenderContext as R, SystemMessage as S, UserMessage as U, AIFragment as a, LoggerContext as b, createAIElement as c, createContext as d, type ChatCompletionRole as e, AssistantMessage as f, type RenderedConversationMessage as g, computeUsage as h, ChatCompletionError as i, type ChatCompletionRequestPayloads as j, type LogChatCompletionRequest as k, type LogChatCompletionResponse as l, type LogLevel as m, type Logger as n, ConsoleLogger as o, CombinedLogger as p, type Literal as q, type RenderableStream as r, type RenderResult as s, type AIComponent as t, attachedContextSymbol as u, type AIElement as v, type Renderable as w };
package/dist/index.d.mts CHANGED
@@ -1,7 +1,8 @@
- import { L as LogImplementation, R as RenderContext, C as Context, A as AIElement, a as RenderedConversationMessage } from './createElement-ms1wdmoH.mjs';
- export { u as AIComponent, b as AIFragment, w as AINode, f as AssistantMessage, B as BoundLogger, j as ChatCompletionError, k as ChatCompletionRequestPayloads, q as CombinedLogger, p as ConsoleLogger, g as ConversationMessage, r as Literal, l as LogChatCompletionRequest, m as LogChatCompletionResponse, n as LogLevel, o as Logger, d as LoggerContext, N as NoopLogImplementation, P as PropsOfAIComponent, t as RenderResult, x as Renderable, s as RenderableStream, S as SystemMessage, U as UserMessage, v as attachedContextSymbol, h as childrenToConversationMessage, i as computeUsage, c as createAIElement, e as createContext } from './createElement-ms1wdmoH.mjs';
+ import { L as LogImplementation, R as RenderContext, C as Context, A as AINode } from './createElement-YEuZ7P4l.mjs';
+ export { t as AIComponent, v as AIElement, a as AIFragment, f as AssistantMessage, B as BoundLogger, i as ChatCompletionError, j as ChatCompletionRequestPayloads, e as ChatCompletionRole, p as CombinedLogger, o as ConsoleLogger, q as Literal, k as LogChatCompletionRequest, l as LogChatCompletionResponse, m as LogLevel, n as Logger, b as LoggerContext, N as NoopLogImplementation, P as PropsOfAIComponent, s as RenderResult, w as Renderable, r as RenderableStream, g as RenderedConversationMessage, S as SystemMessage, U as UserMessage, u as attachedContextSymbol, h as computeUsage, c as createAIElement, d as createContext } from './createElement-YEuZ7P4l.mjs';
  import { OpenAI } from 'openai';
  export { OpenAI as OpenAIClient } from 'openai';
+ import { ChatCompletionSystemMessageParam, ChatCompletionUserMessageParam, ChatCompletionAssistantMessageParam } from 'openai/resources';
  import AnthropicClient from '@anthropic-ai/sdk';
  export { default as AnthropicClient } from '@anthropic-ai/sdk';
  export { countTokens as countAnthropicTokens } from '@anthropic-ai/tokenizer';
@@ -12,6 +13,7 @@ declare function createRenderContext({ logger, rootRenderId, }?: {
  }): RenderContext;

  type OpenAIChatCompletionRequest = OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming;
+ type OpenAIChatMessage = ChatCompletionSystemMessageParam | ChatCompletionUserMessageParam | ChatCompletionAssistantMessageParam;
  declare module '@gammatech/aijsx' {
  interface ChatCompletionRequestPayloads {
  openai: OpenAIChatCompletionRequest;
@@ -23,18 +25,34 @@ type OpenAIChatCompletionProps = {
  model: ValidOpenAIChatModel;
  maxTokens?: number;
  temperature?: number;
- children: AIElement<any> | AIElement<any>[];
+ children: AINode;
  provider?: string;
  providerRegion?: string;
  };
  declare function OpenAIChatCompletion(props: OpenAIChatCompletionProps, { logger, render, getContext }: RenderContext): AsyncGenerator<string, void, unknown>;

+ type ValidOpenAIVisionModel = 'gpt-4-vision-preview';
+ declare const ContentTypeImage: (_props: {
+ url: string;
+ detail?: 'auto' | 'high' | 'low';
+ }) => null;
+ type OpenAIVisionChatCompletionProps = {
+ model?: ValidOpenAIVisionModel;
+ maxTokens?: number;
+ temperature?: number;
+ children: AINode;
+ provider?: string;
+ providerRegion?: string;
+ };
+ declare function OpenAIVisionChatCompletion(props: OpenAIVisionChatCompletionProps, { logger, render, getContext }: RenderContext): AsyncGenerator<string, void, unknown>;
+
  declare const tokenizer: {
  encode: (text: string) => number[];
  decode: (tokens: number[]) => string;
  };
  declare function tokenLimitForChatModel(model: ValidOpenAIChatModel): number | undefined;
- declare function tokenCountForConversationMessage(message: Pick<RenderedConversationMessage, 'type' | 'content'>): number;
+ declare function tokenCountForOpenAIMessage(message: OpenAIChatMessage): number;
+ declare function tokenCountForOpenAIVisionMessage(message: OpenAIChatMessage): number;

  type AnthropicChatCompletionRequest = AnthropicClient.CompletionCreateParams;
  declare module '@gammatech/aijsx' {
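
This hunk adds a vision-specific completion component alongside the existing chat one: `ContentTypeImage` is a prop-only marker element, and `OpenAIVisionChatCompletion` streams text the same way `OpenAIChatCompletion` does. A hypothetical composition sketch; nesting `ContentTypeImage` inside `UserMessage` is an assumption about how the component reads its children, as is awaiting the render result to a string:

```ts
import {
  createAIElement,
  createRenderContext,
  ContentTypeImage,
  OpenAIVisionChatCompletion,
  UserMessage,
} from "@gammatech/aijsx";

const prompt = createAIElement(
  OpenAIVisionChatCompletion,
  { maxTokens: 256 }, // model is optional; ValidOpenAIVisionModel only allows 'gpt-4-vision-preview'
  createAIElement(
    UserMessage,
    null,
    "What is in this picture?",
    createAIElement(ContentTypeImage, { url: "https://example.com/photo.png", detail: "low" })
  )
);

async function main() {
  const ctx = createRenderContext();
  console.log(String(await ctx.render(prompt)));
}

main().catch(console.error);
```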
@@ -56,7 +74,7 @@ type AnthropicChatCompletionProps = {
  model: ValidAnthropicChatModel;
  maxTokens?: number;
  temperature?: number;
- children: AIElement<any> | AIElement<any>[];
+ children: AINode;
  provider?: string;
  providerRegion?: string;
  };
@@ -69,4 +87,4 @@ type AnthropicChatCompletionProps = {
  */
  declare function AnthropicChatCompletion(props: AnthropicChatCompletionProps, { render, logger, getContext }: RenderContext): AsyncGenerator<string, void, unknown>;

- export { AIElement, AnthropicChatCompletion, type AnthropicChatCompletionRequest, AnthropicClientContext, Context, LogImplementation, OpenAIChatCompletion, type OpenAIChatCompletionRequest, OpenAIClientContext, RenderContext, RenderedConversationMessage, type ValidAnthropicChatModel, type ValidOpenAIChatModel, createRenderContext, defaultMaxTokens, tokenCountForConversationMessage, tokenLimitForChatModel, tokenizer };
+ export { AINode, AnthropicChatCompletion, type AnthropicChatCompletionRequest, AnthropicClientContext, ContentTypeImage, Context, LogImplementation, OpenAIChatCompletion, type OpenAIChatCompletionRequest, type OpenAIChatMessage, OpenAIClientContext, OpenAIVisionChatCompletion, RenderContext, type ValidAnthropicChatModel, type ValidOpenAIChatModel, type ValidOpenAIVisionModel, createRenderContext, defaultMaxTokens, tokenCountForOpenAIMessage, tokenCountForOpenAIVisionMessage, tokenLimitForChatModel, tokenizer };
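
`tokenCountForConversationMessage` is replaced by the pair `tokenCountForOpenAIMessage` / `tokenCountForOpenAIVisionMessage`, which take OpenAI message objects rather than the old `{ type, content }` pick. A migration sketch; treating `'gpt-4'` as a `ValidOpenAIChatModel` is an assumption, since that union is not shown in this diff:

```ts
import {
  tokenCountForOpenAIMessage,
  tokenLimitForChatModel,
  type OpenAIChatMessage,
} from "@gammatech/aijsx";

const message: OpenAIChatMessage = { role: "user", content: "Hello there." };

const used = tokenCountForOpenAIMessage(message);
const limit = tokenLimitForChatModel("gpt-4"); // returns undefined for unknown models

if (limit !== undefined && used > limit) {
  throw new Error(`message uses ${used} tokens, over the ${limit}-token window`);
}
```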
package/dist/index.d.ts CHANGED
@@ -1,7 +1,8 @@
- import { L as LogImplementation, R as RenderContext, C as Context, A as AIElement, a as RenderedConversationMessage } from './createElement-ms1wdmoH.js';
- export { u as AIComponent, b as AIFragment, w as AINode, f as AssistantMessage, B as BoundLogger, j as ChatCompletionError, k as ChatCompletionRequestPayloads, q as CombinedLogger, p as ConsoleLogger, g as ConversationMessage, r as Literal, l as LogChatCompletionRequest, m as LogChatCompletionResponse, n as LogLevel, o as Logger, d as LoggerContext, N as NoopLogImplementation, P as PropsOfAIComponent, t as RenderResult, x as Renderable, s as RenderableStream, S as SystemMessage, U as UserMessage, v as attachedContextSymbol, h as childrenToConversationMessage, i as computeUsage, c as createAIElement, e as createContext } from './createElement-ms1wdmoH.js';
+ import { L as LogImplementation, R as RenderContext, C as Context, A as AINode } from './createElement-YEuZ7P4l.js';
+ export { t as AIComponent, v as AIElement, a as AIFragment, f as AssistantMessage, B as BoundLogger, i as ChatCompletionError, j as ChatCompletionRequestPayloads, e as ChatCompletionRole, p as CombinedLogger, o as ConsoleLogger, q as Literal, k as LogChatCompletionRequest, l as LogChatCompletionResponse, m as LogLevel, n as Logger, b as LoggerContext, N as NoopLogImplementation, P as PropsOfAIComponent, s as RenderResult, w as Renderable, r as RenderableStream, g as RenderedConversationMessage, S as SystemMessage, U as UserMessage, u as attachedContextSymbol, h as computeUsage, c as createAIElement, d as createContext } from './createElement-YEuZ7P4l.js';
  import { OpenAI } from 'openai';
  export { OpenAI as OpenAIClient } from 'openai';
+ import { ChatCompletionSystemMessageParam, ChatCompletionUserMessageParam, ChatCompletionAssistantMessageParam } from 'openai/resources';
  import AnthropicClient from '@anthropic-ai/sdk';
  export { default as AnthropicClient } from '@anthropic-ai/sdk';
  export { countTokens as countAnthropicTokens } from '@anthropic-ai/tokenizer';
@@ -12,6 +13,7 @@ declare function createRenderContext({ logger, rootRenderId, }?: {
  }): RenderContext;

  type OpenAIChatCompletionRequest = OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming;
+ type OpenAIChatMessage = ChatCompletionSystemMessageParam | ChatCompletionUserMessageParam | ChatCompletionAssistantMessageParam;
  declare module '@gammatech/aijsx' {
  interface ChatCompletionRequestPayloads {
  openai: OpenAIChatCompletionRequest;
@@ -23,18 +25,34 @@ type OpenAIChatCompletionProps = {
  model: ValidOpenAIChatModel;
  maxTokens?: number;
  temperature?: number;
- children: AIElement<any> | AIElement<any>[];
+ children: AINode;
  provider?: string;
  providerRegion?: string;
  };
  declare function OpenAIChatCompletion(props: OpenAIChatCompletionProps, { logger, render, getContext }: RenderContext): AsyncGenerator<string, void, unknown>;

+ type ValidOpenAIVisionModel = 'gpt-4-vision-preview';
+ declare const ContentTypeImage: (_props: {
+ url: string;
+ detail?: 'auto' | 'high' | 'low';
+ }) => null;
+ type OpenAIVisionChatCompletionProps = {
+ model?: ValidOpenAIVisionModel;
+ maxTokens?: number;
+ temperature?: number;
+ children: AINode;
+ provider?: string;
+ providerRegion?: string;
+ };
+ declare function OpenAIVisionChatCompletion(props: OpenAIVisionChatCompletionProps, { logger, render, getContext }: RenderContext): AsyncGenerator<string, void, unknown>;
+
  declare const tokenizer: {
  encode: (text: string) => number[];
  decode: (tokens: number[]) => string;
  };
  declare function tokenLimitForChatModel(model: ValidOpenAIChatModel): number | undefined;
- declare function tokenCountForConversationMessage(message: Pick<RenderedConversationMessage, 'type' | 'content'>): number;
+ declare function tokenCountForOpenAIMessage(message: OpenAIChatMessage): number;
+ declare function tokenCountForOpenAIVisionMessage(message: OpenAIChatMessage): number;

  type AnthropicChatCompletionRequest = AnthropicClient.CompletionCreateParams;
  declare module '@gammatech/aijsx' {
@@ -56,7 +74,7 @@ type AnthropicChatCompletionProps = {
  model: ValidAnthropicChatModel;
  maxTokens?: number;
  temperature?: number;
- children: AIElement<any> | AIElement<any>[];
+ children: AINode;
  provider?: string;
  providerRegion?: string;
  };
@@ -69,4 +87,4 @@ type AnthropicChatCompletionProps = {
  */
  declare function AnthropicChatCompletion(props: AnthropicChatCompletionProps, { render, logger, getContext }: RenderContext): AsyncGenerator<string, void, unknown>;

- export { AIElement, AnthropicChatCompletion, type AnthropicChatCompletionRequest, AnthropicClientContext, Context, LogImplementation, OpenAIChatCompletion, type OpenAIChatCompletionRequest, OpenAIClientContext, RenderContext, RenderedConversationMessage, type ValidAnthropicChatModel, type ValidOpenAIChatModel, createRenderContext, defaultMaxTokens, tokenCountForConversationMessage, tokenLimitForChatModel, tokenizer };
+ export { AINode, AnthropicChatCompletion, type AnthropicChatCompletionRequest, AnthropicClientContext, ContentTypeImage, Context, LogImplementation, OpenAIChatCompletion, type OpenAIChatCompletionRequest, type OpenAIChatMessage, OpenAIClientContext, OpenAIVisionChatCompletion, RenderContext, type ValidAnthropicChatModel, type ValidOpenAIChatModel, type ValidOpenAIVisionModel, createRenderContext, defaultMaxTokens, tokenCountForOpenAIMessage, tokenCountForOpenAIVisionMessage, tokenLimitForChatModel, tokenizer };