@gravity-ui/aikit 1.3.5 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. package/dist/adapters/index.d.ts +1 -0
  2. package/dist/adapters/index.js +1 -0
  3. package/dist/adapters/openai/helpers/applyContentUpdate.d.ts +3 -0
  4. package/dist/adapters/openai/helpers/applyContentUpdate.js +113 -0
  5. package/dist/adapters/openai/helpers/buildFinalMessages.d.ts +11 -0
  6. package/dist/adapters/openai/helpers/buildFinalMessages.js +27 -0
  7. package/dist/adapters/openai/helpers/consumeOpenAIStream.d.ts +12 -0
  8. package/dist/adapters/openai/helpers/consumeOpenAIStream.js +101 -0
  9. package/dist/adapters/openai/helpers/contentPartsToMessageContent.d.ts +2 -0
  10. package/dist/adapters/openai/helpers/contentPartsToMessageContent.js +7 -0
  11. package/dist/adapters/openai/helpers/eventTypeUtils.d.ts +4 -0
  12. package/dist/adapters/openai/helpers/eventTypeUtils.js +17 -0
  13. package/dist/adapters/openai/helpers/fetchResponseToStreamEvents.d.ts +3 -0
  14. package/dist/adapters/openai/helpers/fetchResponseToStreamEvents.js +64 -0
  15. package/dist/adapters/openai/helpers/getStreamErrorMessage.d.ts +1 -0
  16. package/dist/adapters/openai/helpers/getStreamErrorMessage.js +12 -0
  17. package/dist/adapters/openai/helpers/getStreamEventContentUpdate.d.ts +41 -0
  18. package/dist/adapters/openai/helpers/getStreamEventContentUpdate.js +246 -0
  19. package/dist/adapters/openai/helpers/getTextDeltaFromStreamEvent.d.ts +3 -0
  20. package/dist/adapters/openai/helpers/getTextDeltaFromStreamEvent.js +34 -0
  21. package/dist/adapters/openai/helpers/isFetchResponse.d.ts +2 -0
  22. package/dist/adapters/openai/helpers/isFetchResponse.js +8 -0
  23. package/dist/adapters/openai/helpers/isMessageOutputItemDoneEvent.d.ts +3 -0
  24. package/dist/adapters/openai/helpers/isMessageOutputItemDoneEvent.js +18 -0
  25. package/dist/adapters/openai/helpers/isOutputItemDoneEvent.d.ts +2 -0
  26. package/dist/adapters/openai/helpers/isOutputItemDoneEvent.js +10 -0
  27. package/dist/adapters/openai/helpers/isOutputTextOrContentPartDone.d.ts +2 -0
  28. package/dist/adapters/openai/helpers/isOutputTextOrContentPartDone.js +10 -0
  29. package/dist/adapters/openai/helpers/isStreamEndOrErrorEvent.d.ts +2 -0
  30. package/dist/adapters/openai/helpers/isStreamEndOrErrorEvent.js +11 -0
  31. package/dist/adapters/openai/helpers/mapOutputToContent.d.ts +4 -0
  32. package/dist/adapters/openai/helpers/mapOutputToContent.js +73 -0
  33. package/dist/adapters/openai/helpers/openAIResponseToMessages.d.ts +4 -0
  34. package/dist/adapters/openai/helpers/openAIResponseToMessages.js +20 -0
  35. package/dist/adapters/openai/index.d.ts +10 -0
  36. package/dist/adapters/openai/index.js +7 -0
  37. package/dist/adapters/openai/types/index.d.ts +20 -0
  38. package/dist/adapters/openai/types/index.js +1 -0
  39. package/dist/adapters/openai/types/openAiTypes.d.ts +213 -0
  40. package/dist/adapters/openai/types/openAiTypes.js +1 -0
  41. package/dist/adapters/openai/useOpenAIResponsesAdapter.d.ts +12 -0
  42. package/dist/adapters/openai/useOpenAIResponsesAdapter.js +83 -0
  43. package/dist/components/organisms/MessageList/MessageList.js +2 -2
  44. package/dist/components/organisms/PromptInput/usePromptInput.js +2 -0
  45. package/dist/components/pages/ChatContainer/__stories__/ChatContainer.stories.js +12 -2
  46. package/dist/index.d.ts +1 -0
  47. package/dist/index.js +2 -0
  48. package/dist/types/chat.d.ts +1 -1
  49. package/package.json +4 -1
@@ -0,0 +1,213 @@
1
+ /**
2
+ * OpenAI Responses API types (source of truth: openai SDK, optionalDependencies).
3
+ * Compatible with openai ^6.x (Responses API).
4
+ * Import path: openai/resources/responses/responses.
5
+ * We re-use SDK types where possible; permissive variants (optional fields) only for
6
+ * raw SSE and partial API responses.
7
+ */
8
+ import type { ResponseOutputRefusal, ResponseOutputText } from 'openai/resources/responses/responses';
9
+ /** Minimal response shape (Response subset); all optional for partial/SSE. */
10
+ export type OpenAIResponseLike = {
11
+ id?: string;
12
+ error?: {
13
+ code?: string;
14
+ message?: string;
15
+ } | null;
16
+ metadata?: Record<string, string> | null;
17
+ output?: OpenAIResponseOutputItem[] | null;
18
+ };
19
+ /** ResponseOutputMessage; id optional for stream/SSE. */
20
+ export type OpenAIResponseOutputMessage = {
21
+ type: 'message';
22
+ id?: string;
23
+ role: 'assistant';
24
+ content: Array<OpenAIResponseOutputText | OpenAIResponseOutputRefusal>;
25
+ status?: 'in_progress' | 'completed' | 'incomplete';
26
+ };
27
+ /** ResponseOutputText; annotations optional for raw SSE / partial responses. */
28
+ export type OpenAIResponseOutputText = Pick<ResponseOutputText, 'text' | 'type'> & {
29
+ annotations?: ResponseOutputText['annotations'];
30
+ };
31
+ /** Re-export from openai. */
32
+ export type OpenAIResponseOutputRefusal = ResponseOutputRefusal;
33
+ /** ResponseReasoningItem; id/summary optional for stream. */
34
+ export type OpenAIResponseReasoningItem = {
35
+ type: 'reasoning';
36
+ id?: string;
37
+ content?: Array<{
38
+ type: 'reasoning_text';
39
+ text: string;
40
+ }>;
41
+ summary?: Array<{
42
+ type: string;
43
+ text: string;
44
+ }>;
45
+ };
46
+ /** ResponseFunctionToolCall; arguments optional for stream. */
47
+ export type OpenAIResponseFunctionToolCall = {
48
+ type: 'function_call';
49
+ call_id: string;
50
+ name: string;
51
+ arguments?: string;
52
+ id?: string;
53
+ };
54
+ /** ResponseOutputItem.McpCall; name/server_label/arguments optional for stream. */
55
+ export type OpenAIResponseMcpCallLike = {
56
+ type: 'mcp_call';
57
+ id: string;
58
+ name?: string;
59
+ server_label?: string;
60
+ arguments?: string;
61
+ status?: 'in_progress' | 'completed' | 'incomplete' | 'calling' | 'failed';
62
+ output?: string | null;
63
+ error?: string | null;
64
+ };
65
+ /** ResponseOutputItem.McpApprovalRequest; name/server_label/arguments optional for stream. */
66
+ export type OpenAIResponseMcpApprovalRequestLike = {
67
+ type: 'mcp_approval_request';
68
+ id: string;
69
+ name?: string;
70
+ server_label?: string;
71
+ arguments?: string;
72
+ };
73
+ /** Future: if API adds mcp_submission_request, map to waitingSubmission. */
74
+ export type OpenAIResponseMcpSubmissionRequestLike = {
75
+ type: 'mcp_submission_request';
76
+ id: string;
77
+ name?: string;
78
+ server_label?: string;
79
+ arguments?: string;
80
+ };
81
+ export type OpenAIResponseOutputItem = OpenAIResponseOutputMessage | OpenAIResponseReasoningItem | OpenAIResponseFunctionToolCall | OpenAIResponseMcpCallLike | OpenAIResponseMcpApprovalRequestLike | OpenAIResponseMcpSubmissionRequestLike | {
82
+ type: string;
83
+ };
84
+ /** Permissive item for stream events (response.output_item.added/done). */
85
+ export type OpenAIStreamOutputItemLike = {
86
+ type?: string;
87
+ [key: string]: unknown;
88
+ };
89
+ /** ResponseStreamEvent-compatible; includes SSE wrapper (event/data) for raw fetch. */
90
+ export type OpenAIStreamEventLike = {
91
+ type: 'response.output_text.delta';
92
+ delta: string;
93
+ } | {
94
+ type: 'response.output_text.done';
95
+ text?: string;
96
+ } | {
97
+ type: 'response.content_part.delta';
98
+ delta?: string;
99
+ } | {
100
+ type: 'response.output_item.added';
101
+ item?: OpenAIStreamOutputItemLike;
102
+ } | {
103
+ type: 'response.output_item.done';
104
+ item?: OpenAIStreamOutputItemLike;
105
+ } | {
106
+ type: 'response.mcp_call.in_progress';
107
+ item_id: string;
108
+ } | {
109
+ type: 'response.mcp_call.completed';
110
+ item_id: string;
111
+ } | {
112
+ type: 'response.mcp_call.failed';
113
+ item_id: string;
114
+ } | {
115
+ type: 'response.reasoning_text.delta';
116
+ item_id: string;
117
+ delta: string;
118
+ } | {
119
+ type: 'response.reasoning_text.done';
120
+ item_id: string;
121
+ text: string;
122
+ } | {
123
+ type: 'response.refusal.delta';
124
+ item_id: string;
125
+ delta: string;
126
+ output_index?: number;
127
+ } | {
128
+ type: 'response.refusal.done';
129
+ item_id: string;
130
+ output_index?: number;
131
+ refusal: string;
132
+ } | {
133
+ type: 'response.reasoning_summary_text.delta';
134
+ item_id: string;
135
+ delta: string;
136
+ } | {
137
+ type: 'response.reasoning_summary_text.done';
138
+ item_id: string;
139
+ text: string;
140
+ } | {
141
+ type: 'response.file_search_call.searching';
142
+ item_id: string;
143
+ } | {
144
+ type: 'response.file_search_call.completed';
145
+ item_id: string;
146
+ } | {
147
+ type: 'response.file_search_call.in_progress';
148
+ item_id: string;
149
+ } | {
150
+ type: 'response.web_search_call.in_progress';
151
+ item_id: string;
152
+ } | {
153
+ type: 'response.web_search_call.completed';
154
+ item_id: string;
155
+ } | {
156
+ type: 'response.web_search_call.searching';
157
+ item_id: string;
158
+ } | {
159
+ type: 'response.code_interpreter_call.interpreting';
160
+ item_id: string;
161
+ } | {
162
+ type: 'response.code_interpreter_call.completed';
163
+ item_id: string;
164
+ } | {
165
+ type: 'response.code_interpreter_call.in_progress';
166
+ item_id: string;
167
+ } | {
168
+ type: 'response.image_generation_call.generating';
169
+ item_id: string;
170
+ } | {
171
+ type: 'response.image_generation_call.completed';
172
+ item_id: string;
173
+ } | {
174
+ type: 'response.image_generation_call.in_progress';
175
+ item_id: string;
176
+ } | {
177
+ type: 'response.mcp_list_tools.in_progress';
178
+ item_id: string;
179
+ } | {
180
+ type: 'response.mcp_list_tools.completed';
181
+ item_id: string;
182
+ } | {
183
+ type: 'response.mcp_list_tools.failed';
184
+ item_id: string;
185
+ error?: string;
186
+ } | {
187
+ type: 'response.done';
188
+ } | {
189
+ type: 'response.completed';
190
+ } | {
191
+ type: 'response.failed';
192
+ } | {
193
+ type: 'error';
194
+ error?: string;
195
+ } | {
196
+ event?: string;
197
+ data?: {
198
+ type?: string;
199
+ delta?: string;
200
+ text?: string;
201
+ item?: OpenAIStreamOutputItemLike;
202
+ item_id?: string;
203
+ };
204
+ item?: OpenAIStreamOutputItemLike;
205
+ item_id?: string;
206
+ error?: string;
207
+ refusal?: string;
208
+ } | {
209
+ text?: string;
210
+ } | {
211
+ type?: string;
212
+ [key: string]: unknown;
213
+ };
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,12 @@
1
+ import type { TChatMessage } from '../../types';
2
+ import type { OpenAIStreamAdapterOptions, OpenAIStreamAdapterResult, OpenAIStreamSource } from './types';
3
+ import { OpenAIResponseLike } from './types/openAiTypes';
4
+ export type { OpenAIStreamAdapterOptions, OpenAIStreamAdapterResult, OpenAIStreamSource, } from './types';
5
+ /** Single non-streaming response → TChatMessage[]. For streaming use useOpenAIStreamAdapter. */
6
+ export declare function useOpenAIResponsesAdapter(response: OpenAIResponseLike | null): TChatMessage[];
7
+ /**
8
+ * Consumes OpenAI stream (fetch SSE or AsyncIterable), returns { messages, status, error }.
9
+ * Only message output_item.done starts a new assistant message; MCP/tool/reasoning .done are ignored.
10
+ * See README for result shape and options.
11
+ */
12
+ export declare function useOpenAIStreamAdapter(stream: OpenAIStreamSource | null, options?: OpenAIStreamAdapterOptions): OpenAIStreamAdapterResult;
@@ -0,0 +1,83 @@
1
+ import { useEffect, useMemo, useRef, useState } from 'react';
2
+ import { consumeOpenAIStream } from './helpers/consumeOpenAIStream';
3
+ import { fetchResponseToStreamEvents } from './helpers/fetchResponseToStreamEvents';
4
+ import { isFetchResponse } from './helpers/isFetchResponse';
5
+ import { openAIResponseToMessages } from './helpers/openAIResponseToMessages';
6
+ /** Single non-streaming response → TChatMessage[]. For streaming use useOpenAIStreamAdapter. */
7
+ export function useOpenAIResponsesAdapter(response) {
8
+ return useMemo(() => openAIResponseToMessages(response), [response]);
9
+ }
10
+ /**
11
+ * Consumes OpenAI stream (fetch SSE or AsyncIterable), returns { messages, status, error }.
12
+ * Only message output_item.done starts a new assistant message; MCP/tool/reasoning .done are ignored.
13
+ * See README for result shape and options.
14
+ */
15
+ export function useOpenAIStreamAdapter(stream, options = {}) {
16
+ const { initialMessages = [], assistantMessageId: optionId, onStreamEnd } = options;
17
+ const onStreamEndRef = useRef(onStreamEnd);
18
+ onStreamEndRef.current = onStreamEnd;
19
+ const initialMessagesRef = useRef(initialMessages);
20
+ initialMessagesRef.current = initialMessages;
21
+ const [messages, setMessages] = useState(initialMessages);
22
+ const [status, setStatus] = useState('idle');
23
+ const [error, setError] = useState(null);
24
+ const assistantMessageId = useMemo(() => optionId !== null && optionId !== void 0 ? optionId : `assistant-${Date.now()}`, [optionId]);
25
+ useEffect(() => {
26
+ if (!stream) {
27
+ setStatus('idle');
28
+ setError(null);
29
+ return undefined;
30
+ }
31
+ const streamToConsume = isFetchResponse(stream)
32
+ ? fetchResponseToStreamEvents(stream)
33
+ : stream;
34
+ let cancelled = false;
35
+ setStatus('streaming');
36
+ setError(null);
37
+ const baseMessages = initialMessagesRef.current;
38
+ const getAssistantMessageId = (index) => index === 0 ? assistantMessageId : `${assistantMessageId}-${index}`;
39
+ setMessages([
40
+ ...baseMessages,
41
+ {
42
+ id: getAssistantMessageId(0),
43
+ role: 'assistant',
44
+ content: '',
45
+ },
46
+ ]);
47
+ const callbacks = {
48
+ baseMessages,
49
+ getAssistantMessageId,
50
+ onContentUpdate: (messageId, content) => {
51
+ if (cancelled)
52
+ return;
53
+ setMessages((prev) => prev.map((msg) => msg.id === messageId && msg.role === 'assistant'
54
+ ? Object.assign(Object.assign({}, msg), { content })
55
+ : msg));
56
+ },
57
+ onNewMessage: (messageId) => {
58
+ if (cancelled)
59
+ return;
60
+ setMessages((prev) => [
61
+ ...prev,
62
+ {
63
+ id: messageId,
64
+ role: 'assistant',
65
+ content: '',
66
+ },
67
+ ]);
68
+ },
69
+ onEnd: (finalMessages, s, err) => {
70
+ var _a;
71
+ setStatus(s);
72
+ setError(err !== null && err !== void 0 ? err : null);
73
+ (_a = onStreamEndRef.current) === null || _a === void 0 ? void 0 : _a.call(onStreamEndRef, finalMessages);
74
+ },
75
+ getIsCancelled: () => cancelled,
76
+ };
77
+ consumeOpenAIStream(streamToConsume, callbacks);
78
+ return () => {
79
+ cancelled = true;
80
+ };
81
+ }, [stream, assistantMessageId]);
82
+ return { messages, status, error };
83
+ }
@@ -9,8 +9,8 @@ import { UserMessage } from '../UserMessage';
9
9
  import { ErrorAlert } from './ErrorAlert';
10
10
  import './MessageList.scss';
11
11
  const b = block('message-list');
12
- export function MessageList({ messages, messageRendererRegistry, transformOptions, shouldParseIncompleteMarkdown, showActionsOnHover, showTimestamp, showAvatar, userActions, assistantActions, loaderStatuses = ['submitted'], className, qa, status, errorMessage, onRetry, hasPreviousMessages = false, onLoadPreviousMessages, }) {
13
- const isStreaming = status === 'streaming';
12
+ export function MessageList({ messages, messageRendererRegistry, transformOptions, shouldParseIncompleteMarkdown, showActionsOnHover, showTimestamp, showAvatar, userActions, assistantActions, loaderStatuses = ['submitted', 'streaming_loading'], className, qa, status, errorMessage, onRetry, hasPreviousMessages = false, onLoadPreviousMessages, }) {
13
+ const isStreaming = status === 'streaming' || status === 'streaming_loading';
14
14
  const isSubmitted = status === 'submitted';
15
15
  const showLoader = status && loaderStatuses.includes(status);
16
16
  const { containerRef } = useSmartScroll({
@@ -16,6 +16,7 @@ export function usePromptInput(props) {
16
16
  // ChatStatus.ready → submitButtonState.enabled
17
17
  // ChatStatus.error → submitButtonState.enabled
18
18
  // ChatStatus.streaming → submitButtonState.cancelable
19
+ // ChatStatus.streaming_loading → submitButtonState.cancelable (same as streaming)
19
20
  // ChatStatus.submitted → submitButtonState.loading
20
21
  let submitButtonState = 'disabled';
21
22
  // disabled by props or empty value and status is ready
@@ -32,6 +33,7 @@ export function usePromptInput(props) {
32
33
  submitButtonState = 'enabled';
33
34
  break;
34
35
  case 'streaming':
36
+ case 'streaming_loading':
35
37
  submitButtonState = onCancel ? 'cancelable' : 'enabled';
36
38
  break;
37
39
  case 'submitted':
@@ -359,9 +359,16 @@ export const WithStreaming = {
359
359
  actions: createMessageActions(assistantMessageId, 'assistant'),
360
360
  },
361
361
  ]);
362
- // Simulate word-by-word streaming
362
+ // Simulate word-by-word streaming with a streaming_loading pause in the middle
363
363
  const words = fullResponse.split(' ');
364
+ const pauseIndex = Math.floor(words.length / 2);
364
365
  for (let i = 0; i < words.length; i++) {
366
+ // Pause streaming at the midpoint: switch to streaming_loading for 5 seconds
367
+ if (i === pauseIndex) {
368
+ setStatus('streaming_loading');
369
+ await new Promise((resolve) => setTimeout(resolve, 5000));
370
+ setStatus('streaming');
371
+ }
365
372
  await new Promise((resolve) => setTimeout(resolve, 100));
366
373
  const currentText = words.slice(0, i + 1).join(' ');
367
374
  setMessages((prev) => prev.map((msg) => msg.id === assistantMessageId
@@ -747,7 +754,10 @@ export const FullStreamingExample = {
747
754
  const [controller, setController] = useState(null);
748
755
  const isProcessingRef = React.useRef(false);
749
756
  const handleSendMessage = async (data) => {
750
- if (isProcessingRef.current || status === 'streaming' || status === 'submitted') {
757
+ if (isProcessingRef.current ||
758
+ status === 'streaming' ||
759
+ status === 'streaming_loading' ||
760
+ status === 'submitted') {
751
761
  return;
752
762
  }
753
763
  isProcessingRef.current = true;
package/dist/index.d.ts CHANGED
@@ -7,5 +7,6 @@ export * from './components/molecules';
7
7
  export * from './components/organisms';
8
8
  export * from './components/templates';
9
9
  export * from './components/pages';
10
+ export * from './adapters';
10
11
  export * from './hooks';
11
12
  export * from './utils';
package/dist/index.js CHANGED
@@ -13,6 +13,8 @@ export * from './components/organisms';
13
13
  export * from './components/templates';
14
14
  // === Pages ===
15
15
  export * from './components/pages';
16
+ // === Adapters ===
17
+ export * from './adapters';
16
18
  // === Hooks ===
17
19
  export * from './hooks';
18
20
  // === Utilities ===
@@ -5,7 +5,7 @@ export type ChatType = {
5
5
  lastMessage?: string;
6
6
  metadata?: Record<string, unknown>;
7
7
  };
8
- export type ChatStatus = 'submitted' | 'streaming' | 'ready' | 'error';
8
+ export type ChatStatus = 'submitted' | 'streaming' | 'streaming_loading' | 'ready' | 'error';
9
9
  /**
10
10
  * List item type for chat history that can be either a chat or a date header
11
11
  */
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@gravity-ui/aikit",
3
- "version": "1.3.5",
3
+ "version": "1.5.0",
4
4
  "description": "Gravity UI base kit for building ai assistant chats",
5
5
  "license": "MIT",
6
6
  "main": "./dist/index.js",
@@ -145,5 +145,8 @@
145
145
  "react-window": "^2.2.1",
146
146
  "remend": "^1.0.1",
147
147
  "uuid": "^13.0.0"
148
+ },
149
+ "optionalDependencies": {
150
+ "openai": "^6.22.0"
148
151
  }
149
152
  }