@copilotkit/react-core 0.16.0 → 0.17.0-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (142) hide show
  1. package/.turbo/turbo-build.log +168 -134
  2. package/CHANGELOG.md +6 -0
  3. package/dist/components/copilot-provider/copilot-provider-props.d.ts +3 -3
  4. package/dist/components/copilot-provider/copilot-provider-props.mjs +1 -2
  5. package/dist/components/copilot-provider/copilot-provider-props.mjs.map +1 -1
  6. package/dist/components/copilot-provider/copilot-provider.d.ts +3 -3
  7. package/dist/components/copilot-provider/copilot-provider.mjs +468 -7
  8. package/dist/components/copilot-provider/copilot-provider.mjs.map +1 -1
  9. package/dist/components/copilot-provider/index.d.ts +3 -3
  10. package/dist/components/copilot-provider/index.mjs +467 -8
  11. package/dist/components/copilot-provider/index.mjs.map +1 -1
  12. package/dist/components/copilot-provider/standard-copilot-api-config.d.ts +3 -3
  13. package/dist/components/copilot-provider/standard-copilot-api-config.mjs +12 -3
  14. package/dist/components/copilot-provider/standard-copilot-api-config.mjs.map +1 -1
  15. package/dist/components/index.d.ts +3 -3
  16. package/dist/components/index.mjs +467 -9
  17. package/dist/components/index.mjs.map +1 -1
  18. package/dist/context/copilot-context.d.ts +4 -8
  19. package/dist/context/copilot-context.mjs +70 -3
  20. package/dist/context/copilot-context.mjs.map +1 -1
  21. package/dist/context/index.d.ts +4 -4
  22. package/dist/context/index.mjs +69 -4
  23. package/dist/context/index.mjs.map +1 -1
  24. package/dist/hooks/index.d.ts +6 -1
  25. package/dist/hooks/index.mjs +654 -14
  26. package/dist/hooks/index.mjs.map +1 -1
  27. package/dist/hooks/use-chat.d.ts +84 -0
  28. package/dist/hooks/use-chat.mjs +461 -0
  29. package/dist/hooks/use-chat.mjs.map +1 -0
  30. package/dist/hooks/use-copilot-chat.d.ts +10 -3
  31. package/dist/hooks/use-copilot-chat.mjs +599 -10
  32. package/dist/hooks/use-copilot-chat.mjs.map +1 -1
  33. package/dist/hooks/use-flat-category-store.mjs +68 -3
  34. package/dist/hooks/use-flat-category-store.mjs.map +1 -1
  35. package/dist/hooks/use-make-copilot-actionable.mjs +95 -4
  36. package/dist/hooks/use-make-copilot-actionable.mjs.map +1 -1
  37. package/dist/hooks/use-make-copilot-document-readable.mjs +87 -4
  38. package/dist/hooks/use-make-copilot-document-readable.mjs.map +1 -1
  39. package/dist/hooks/use-make-copilot-readable.mjs +87 -4
  40. package/dist/hooks/use-make-copilot-readable.mjs.map +1 -1
  41. package/dist/hooks/use-tree.mjs +153 -3
  42. package/dist/hooks/use-tree.mjs.map +1 -1
  43. package/dist/index.d.ts +6 -2
  44. package/dist/index.mjs +1204 -21
  45. package/dist/index.mjs.map +1 -1
  46. package/dist/openai/chat-completion-client.d.ts +56 -0
  47. package/dist/openai/chat-completion-client.mjs +360 -0
  48. package/dist/openai/chat-completion-client.mjs.map +1 -0
  49. package/dist/openai/chat-completion-stream.d.ts +21 -0
  50. package/dist/openai/chat-completion-stream.mjs +221 -0
  51. package/dist/openai/chat-completion-stream.mjs.map +1 -0
  52. package/dist/openai/chat-completion-transport.d.ts +40 -0
  53. package/dist/openai/chat-completion-transport.mjs +181 -0
  54. package/dist/openai/chat-completion-transport.mjs.map +1 -0
  55. package/dist/openai/index.d.ts +10 -0
  56. package/dist/openai/index.mjs +221 -0
  57. package/dist/openai/index.mjs.map +1 -0
  58. package/dist/openai-assistants/hooks/index.mjs +235 -14
  59. package/dist/openai-assistants/hooks/index.mjs.map +1 -1
  60. package/dist/openai-assistants/hooks/use-assistants.mjs +52 -8
  61. package/dist/openai-assistants/hooks/use-assistants.mjs.map +1 -1
  62. package/dist/openai-assistants/hooks/use-copilot-chat-v2.mjs +236 -13
  63. package/dist/openai-assistants/hooks/use-copilot-chat-v2.mjs.map +1 -1
  64. package/dist/openai-assistants/index.mjs +236 -15
  65. package/dist/openai-assistants/index.mjs.map +1 -1
  66. package/dist/openai-assistants/utils/index.mjs +46 -4
  67. package/dist/openai-assistants/utils/index.mjs.map +1 -1
  68. package/dist/openai-assistants/utils/process-message-stream.mjs +46 -3
  69. package/dist/openai-assistants/utils/process-message-stream.mjs.map +1 -1
  70. package/dist/types/annotated-function.mjs +0 -2
  71. package/dist/types/annotated-function.mjs.map +1 -1
  72. package/dist/types/base.d.ts +56 -0
  73. package/dist/types/base.mjs +1 -0
  74. package/dist/types/base.mjs.map +1 -0
  75. package/dist/types/document-pointer.mjs +0 -2
  76. package/dist/types/document-pointer.mjs.map +1 -1
  77. package/dist/types/index.d.ts +1 -0
  78. package/dist/types/index.mjs +0 -2
  79. package/dist/types/index.mjs.map +1 -1
  80. package/dist/types/message.d.ts +2 -0
  81. package/dist/types/message.mjs +1 -0
  82. package/dist/types/message.mjs.map +1 -0
  83. package/dist/utils/utils.mjs +0 -2
  84. package/dist/utils/utils.mjs.map +1 -1
  85. package/dist/utils/utils.test.mjs +0 -1
  86. package/dist/utils/utils.test.mjs.map +1 -1
  87. package/package.json +4 -4
  88. package/src/components/copilot-provider/copilot-provider.tsx +4 -5
  89. package/src/context/copilot-context.tsx +1 -12
  90. package/src/context/index.ts +0 -1
  91. package/src/hooks/index.ts +1 -0
  92. package/src/hooks/use-chat.ts +197 -0
  93. package/src/hooks/use-copilot-chat.ts +10 -22
  94. package/src/index.tsx +1 -0
  95. package/src/openai/chat-completion-client.ts +240 -0
  96. package/src/openai/chat-completion-stream.ts +56 -0
  97. package/src/openai/chat-completion-transport.ts +190 -0
  98. package/src/openai/index.tsx +5 -0
  99. package/src/openai-assistants/hooks/use-copilot-chat-v2.ts +2 -2
  100. package/src/types/base.ts +61 -0
  101. package/src/types/index.ts +1 -0
  102. package/src/types/message.ts +0 -0
  103. package/dist/chunk-7GFKOIO7.mjs +0 -3
  104. package/dist/chunk-7GFKOIO7.mjs.map +0 -1
  105. package/dist/chunk-BABVSMJR.mjs +0 -3
  106. package/dist/chunk-BABVSMJR.mjs.map +0 -1
  107. package/dist/chunk-DE37LEZJ.mjs +0 -79
  108. package/dist/chunk-DE37LEZJ.mjs.map +0 -1
  109. package/dist/chunk-EFZPSZWO.mjs +0 -3
  110. package/dist/chunk-EFZPSZWO.mjs.map +0 -1
  111. package/dist/chunk-EV26IMLL.mjs +0 -165
  112. package/dist/chunk-EV26IMLL.mjs.map +0 -1
  113. package/dist/chunk-F2JIAPZQ.mjs +0 -13
  114. package/dist/chunk-F2JIAPZQ.mjs.map +0 -1
  115. package/dist/chunk-FRAKUJWH.mjs +0 -3
  116. package/dist/chunk-FRAKUJWH.mjs.map +0 -1
  117. package/dist/chunk-IF64NU27.mjs +0 -125
  118. package/dist/chunk-IF64NU27.mjs.map +0 -1
  119. package/dist/chunk-JD7BAH7U.mjs +0 -3
  120. package/dist/chunk-JD7BAH7U.mjs.map +0 -1
  121. package/dist/chunk-MRXNTQOX.mjs +0 -55
  122. package/dist/chunk-MRXNTQOX.mjs.map +0 -1
  123. package/dist/chunk-MZ5UN3BY.mjs +0 -28
  124. package/dist/chunk-MZ5UN3BY.mjs.map +0 -1
  125. package/dist/chunk-PF7LXYPO.mjs +0 -19
  126. package/dist/chunk-PF7LXYPO.mjs.map +0 -1
  127. package/dist/chunk-QACD2U6P.mjs +0 -3
  128. package/dist/chunk-QACD2U6P.mjs.map +0 -1
  129. package/dist/chunk-RFZQHCNS.mjs +0 -19
  130. package/dist/chunk-RFZQHCNS.mjs.map +0 -1
  131. package/dist/chunk-SPCZTZCY.mjs +0 -3
  132. package/dist/chunk-SPCZTZCY.mjs.map +0 -1
  133. package/dist/chunk-VUY2K2DI.mjs +0 -135
  134. package/dist/chunk-VUY2K2DI.mjs.map +0 -1
  135. package/dist/chunk-WL2MC3E2.mjs +0 -27
  136. package/dist/chunk-WL2MC3E2.mjs.map +0 -1
  137. package/dist/chunk-YGJFU4ZP.mjs +0 -58
  138. package/dist/chunk-YGJFU4ZP.mjs.map +0 -1
  139. package/dist/chunk-YPSGKPDA.mjs +0 -3
  140. package/dist/chunk-YPSGKPDA.mjs.map +0 -1
  141. package/dist/chunk-YULKJPY3.mjs +0 -70
  142. package/dist/chunk-YULKJPY3.mjs.map +0 -1
@@ -1 +1 @@
1
- {"version":3,"sources":[],"names":[],"mappings":""}
1
+ {"version":3,"sources":["../../src/hooks/use-copilot-chat.ts","../../src/context/copilot-context.tsx","../../src/hooks/use-chat.ts","../../src/openai/chat-completion-client.ts","../../src/openai/chat-completion-transport.ts","../../src/components/copilot-provider/copilot-provider.tsx","../../src/hooks/use-make-copilot-actionable.ts","../../src/hooks/use-make-copilot-readable.ts","../../src/hooks/use-make-copilot-document-readable.ts"],"sourcesContent":["import { useMemo, useContext } from \"react\";\nimport { CopilotContext, CopilotContextParams } from \"../context/copilot-context\";\nimport { Message } from \"../types\";\nimport { UseChatOptions, useChat } from \"./use-chat\";\nimport { defaultCopilotContextCategories } from \"../components\";\nimport { ChatCompletionCreateParams } from \"openai/resources/chat\";\n\nexport interface UseCopilotChatOptions extends UseChatOptions {\n makeSystemMessage?: (contextString: string) => string;\n}\n\nexport interface UseCopilotChatReturn {\n visibleMessages: Message[];\n append: (message: Message) => Promise<void>;\n reload: () => Promise<void>;\n stop: () => void;\n isLoading: boolean;\n input: string;\n setInput: React.Dispatch<React.SetStateAction<string>>;\n}\n\nexport function useCopilotChat({\n makeSystemMessage,\n ...options\n}: UseCopilotChatOptions): UseCopilotChatReturn {\n const {\n getContextString,\n getChatCompletionFunctionDescriptions,\n getFunctionCallHandler,\n copilotApiConfig,\n } = useContext(CopilotContext);\n\n const systemMessage: Message = useMemo(() => {\n const systemMessageMaker = makeSystemMessage || defaultSystemMessage;\n const contextString = getContextString([], defaultCopilotContextCategories); // TODO: make the context categories configurable\n\n return {\n id: \"system\",\n content: systemMessageMaker(contextString),\n role: \"system\",\n };\n }, [getContextString, makeSystemMessage]);\n\n const functionDescriptions: ChatCompletionCreateParams.Function[] = useMemo(() => {\n return 
getChatCompletionFunctionDescriptions();\n }, [getChatCompletionFunctionDescriptions]);\n\n const { messages, append, reload, stop, isLoading, input, setInput } = useChat({\n ...options,\n copilotConfig: copilotApiConfig,\n id: options.id,\n initialMessages: [systemMessage].concat(options.initialMessages || []),\n functions: functionDescriptions,\n onFunctionCall: getFunctionCallHandler(),\n headers: { ...options.headers },\n body: {\n ...options.body,\n },\n });\n\n const visibleMessages = messages.filter(\n (message) => message.role === \"user\" || message.role === \"assistant\",\n );\n\n return {\n visibleMessages,\n append,\n reload,\n stop,\n isLoading,\n input,\n setInput,\n };\n}\n\nexport function defaultSystemMessage(contextString: string): string {\n return `\nPlease act as an efficient, competent, conscientious, and industrious professional assistant.\n\nHelp the user achieve their goals, and you do so in a way that is as efficient as possible, without unnecessary fluff, but also without sacrificing professionalism.\nAlways be polite and respectful, and prefer brevity over verbosity.\n\nThe user has provided you with the following context:\n\\`\\`\\`\n${contextString}\n\\`\\`\\`\n\nThey have also provided you with functions you can call to initiate actions on their behalf, or functions you can call to receive more information.\n\nPlease assist them as best you can.\n\nYou can ask them for clarifying questions if needed, but don't be annoying about it. 
If you can reasonably 'fill in the blanks' yourself, do so.\n\nIf you would like to call a function, call it without saying anything else.\n`;\n}\n","\"use client\";\n\nimport { FunctionCallHandler } from \"../types\";\nimport React from \"react\";\nimport { TreeNodeId } from \"../hooks/use-tree\";\nimport { AnnotatedFunction } from \"../types/annotated-function\";\nimport { ChatCompletionCreateParams } from \"openai/resources/chat\";\nimport { DocumentPointer } from \"../types\";\n\n/**\n * Interface for the configuration of the Copilot API.\n */\nexport interface CopilotApiConfig {\n /**\n * The endpoint for the chat API.\n */\n chatApiEndpoint: string;\n\n /**\n * The endpoint for the chat API v2.\n */\n chatApiEndpointV2: string;\n\n /**\n * additional headers to be sent with the request\n * @default {}\n * @example\n * ```\n * {\n * 'Authorization': 'Bearer your_token_here'\n * }\n * ```\n */\n headers: Record<string, string>;\n\n /**\n * Additional body params to be sent with the request\n * @default {}\n * @example\n * ```\n * {\n * 'message': 'Hello, world!'\n * }\n * ```\n */\n body: Record<string, any>;\n}\n\nexport interface CopilotContextParams {\n // function-calling\n entryPoints: Record<string, AnnotatedFunction<any[]>>;\n setEntryPoint: (id: string, entryPoint: AnnotatedFunction<any[]>) => void;\n removeEntryPoint: (id: string) => void;\n getChatCompletionFunctionDescriptions: () => ChatCompletionCreateParams.Function[];\n getFunctionCallHandler: () => FunctionCallHandler;\n\n // text context\n addContext: (context: string, parentId?: string, categories?: string[]) => TreeNodeId;\n removeContext: (id: TreeNodeId) => void;\n getContextString: (documents: DocumentPointer[], categories: string[]) => string;\n\n // document context\n addDocumentContext: (documentPointer: DocumentPointer, categories?: string[]) => TreeNodeId;\n removeDocumentContext: (documentId: string) => void;\n getDocumentsContext: (categories: string[]) => DocumentPointer[];\n\n // 
api endpoints\n copilotApiConfig: CopilotApiConfig;\n}\n\nconst emptyCopilotContext: CopilotContextParams = {\n entryPoints: {},\n setEntryPoint: () => {},\n removeEntryPoint: () => {},\n getChatCompletionFunctionDescriptions: () => returnAndThrowInDebug([]),\n getFunctionCallHandler: () => returnAndThrowInDebug(async () => {}),\n\n getContextString: (documents: DocumentPointer[], categories: string[]) =>\n returnAndThrowInDebug(\"\"),\n addContext: () => \"\",\n removeContext: () => {},\n\n getDocumentsContext: (categories: string[]) => returnAndThrowInDebug([]),\n addDocumentContext: () => returnAndThrowInDebug(\"\"),\n removeDocumentContext: () => {},\n\n copilotApiConfig: new (class implements CopilotApiConfig {\n get chatApiEndpoint(): string {\n throw new Error(\n \"Remember to wrap your app in a `<CopilotProvider> {...} </CopilotProvider>` !!!\",\n );\n }\n get chatApiEndpointV2(): string {\n throw new Error(\n \"Remember to wrap your app in a `<CopilotProvider> {...} </CopilotProvider>` !!!\",\n );\n }\n get headers(): Record<string, string> {\n return {};\n }\n get body(): Record<string, any> {\n return {};\n }\n })(),\n};\n\nexport const CopilotContext = React.createContext<CopilotContextParams>(emptyCopilotContext);\n\nfunction returnAndThrowInDebug<T>(value: T): T {\n throw new Error(\n \"Remember to wrap your app in a `<CopilotProvider> {...} </CopilotProvider>` !!!\",\n );\n return value;\n}\n","import { useState } from \"react\";\nimport { Message, Function, FunctionCallHandler } from \"../types\";\nimport { nanoid } from \"nanoid\";\nimport { ChatCompletionClient } from \"../openai/chat-completion-client\";\nimport { CopilotApiConfig } from \"../context\";\n\nexport type UseChatOptions = {\n /**\n * The API endpoint that accepts a `{ messages: Message[] }` object and returns\n * a stream of tokens of the AI chat response. Defaults to `/api/chat`.\n */\n api?: string;\n /**\n * A unique identifier for the chat. 
If not provided, a random one will be\n * generated. When provided, the `useChat` hook with the same `id` will\n * have shared states across components.\n */\n id?: string;\n /**\n * System messages of the chat. Defaults to an empty array.\n */\n initialMessages?: Message[];\n /**\n * Callback function to be called when a function call is received.\n * If the function returns a `ChatRequest` object, the request will be sent\n * automatically to the API and will be used to update the chat.\n */\n onFunctionCall?: FunctionCallHandler;\n /**\n * HTTP headers to be sent with the API request.\n */\n headers?: Record<string, string> | Headers;\n /**\n * Extra body object to be sent with the API request.\n * @example\n * Send a `sessionId` to the API along with the messages.\n * ```js\n * useChat({\n * body: {\n * sessionId: '123',\n * }\n * })\n * ```\n */\n body?: object;\n /**\n * Function definitions to be sent to the API.\n */\n functions?: Function[];\n};\n\nexport type UseChatHelpers = {\n /** Current messages in the chat */\n messages: Message[];\n /**\n * Append a user message to the chat list. This triggers the API call to fetch\n * the assistant's response.\n * @param message The message to append\n */\n append: (message: Message) => Promise<void>;\n /**\n * Reload the last AI chat response for the given chat history. 
If the last\n * message isn't from the assistant, it will request the API to generate a\n * new response.\n */\n reload: () => Promise<void>;\n /**\n * Abort the current request immediately, keep the generated tokens if any.\n */\n stop: () => void;\n /** The current value of the input */\n input: string;\n /** setState-powered method to update the input value */\n setInput: React.Dispatch<React.SetStateAction<string>>;\n /** Whether the API request is in progress */\n isLoading: boolean;\n};\n\nexport type UseChatOptionsWithCopilotConfig = UseChatOptions & {\n copilotConfig: CopilotApiConfig;\n};\n\nexport function useChat(options: UseChatOptionsWithCopilotConfig): UseChatHelpers {\n const [messages, setMessages] = useState<Message[]>([]);\n const [input, setInput] = useState(\"\");\n const [isLoading, setIsLoading] = useState(false);\n\n const runChatCompletion = async (messages: Message[]): Promise<Message> => {\n return new Promise<Message>((resolve, reject) => {\n setIsLoading(true);\n\n const assistantMessage: Message = {\n id: nanoid(),\n createdAt: new Date(),\n content: \"\",\n role: \"assistant\",\n };\n\n // Assistant messages are always copied when using setState\n setMessages([...messages, { ...assistantMessage }]);\n\n const messagesWithContext = [...(options.initialMessages || []), ...messages];\n\n const client = new ChatCompletionClient({\n url: options.api || \"/api/copilotkit/openai\",\n });\n\n const cleanup = () => {\n client.off(\"content\");\n client.off(\"end\");\n client.off(\"error\");\n client.off(\"function\");\n };\n\n client.on(\"content\", (content) => {\n assistantMessage.content += content;\n setMessages([...messages, { ...assistantMessage }]);\n });\n\n client.on(\"end\", () => {\n setIsLoading(false);\n cleanup();\n resolve({ ...assistantMessage });\n });\n\n client.on(\"error\", (error) => {\n setIsLoading(false);\n cleanup();\n reject(error);\n });\n\n client.on(\"function\", async (functionCall) => {\n 
assistantMessage.function_call = {\n name: functionCall.name,\n arguments: JSON.stringify(functionCall.arguments),\n };\n setMessages([...messages, { ...assistantMessage }]);\n // quit early if we get a function call\n setIsLoading(false);\n cleanup();\n resolve({ ...assistantMessage });\n });\n\n client.fetch({\n messages: messagesWithContext,\n functions: options.functions,\n headers: options.headers,\n copilotConfig: options.copilotConfig,\n });\n });\n };\n\n const runChatCompletionAndHandleFunctionCall = async (messages: Message[]): Promise<void> => {\n const message = await runChatCompletion(messages);\n if (message.function_call && options.onFunctionCall) {\n await options.onFunctionCall(messages, message.function_call);\n }\n };\n\n const append = async (message: Message): Promise<void> => {\n if (isLoading) {\n return;\n }\n const newMessages = [...messages, message];\n setMessages(newMessages);\n return runChatCompletionAndHandleFunctionCall(newMessages);\n };\n\n const reload = async (): Promise<void> => {\n if (isLoading || messages.length === 0) {\n return;\n }\n let newMessages = [...messages];\n const lastMessage = messages[messages.length - 1];\n\n if (lastMessage.role === \"assistant\") {\n newMessages = newMessages.slice(0, -1);\n }\n setMessages(newMessages);\n\n return runChatCompletionAndHandleFunctionCall(newMessages);\n };\n\n const stop = (): void => {\n throw new Error(\"Not implemented\");\n };\n\n return {\n messages,\n append,\n reload,\n stop,\n isLoading,\n input,\n setInput,\n };\n}\n","import EventEmitter from \"eventemitter3\";\nimport { Function, Message, Role } from \"../types\";\nimport {\n ChatCompletionTransport,\n ChatCompletionTransportFetchParams,\n} from \"./chat-completion-transport\";\n\ninterface ChatCompletionClientConfiguration {\n url: string;\n model?: string;\n}\n\ninterface ChatCompletionClientEvents {\n content: string;\n partial: [string, string];\n error: any;\n function: {\n name: string;\n arguments: any;\n 
};\n end: void;\n}\n\nexport interface ChatCompletionChunk {\n choices: {\n delta: {\n role: Role;\n content?: string | null;\n function_call?: {\n name?: string;\n arguments?: string;\n };\n };\n }[];\n}\n\nconst DEFAULT_MAX_TOKENS = 8192;\n\nexport class ChatCompletionClient extends EventEmitter<ChatCompletionClientEvents> {\n private url: string;\n private model?: string;\n\n private chatCompletionTransport: ChatCompletionTransport | null = null;\n private mode: \"function\" | \"message\" | null = null;\n private functionCallName: string = \"\";\n private functionCallArguments: string = \"\";\n\n constructor(params: ChatCompletionClientConfiguration) {\n super();\n this.model = params.model;\n this.url = params.url;\n }\n\n public async fetch(params: ChatCompletionTransportFetchParams) {\n params = { ...params };\n if (this.model && this.model in maxTokensByModel) {\n params.maxTokens ||= maxTokensByModel[this.model];\n } else {\n params.maxTokens ||= DEFAULT_MAX_TOKENS;\n }\n\n params.functions ||= [];\n params.model = this.model;\n params.messages = this.buildPrompt(params);\n return await this.runPrompt(params);\n }\n\n private buildPrompt(params: ChatCompletionTransportFetchParams): Message[] {\n let maxTokens = params.maxTokens!;\n const messages = params.messages!;\n const functions = params.functions!;\n const functionsNumTokens = countFunctionsTokens(functions);\n if (functionsNumTokens > maxTokens) {\n throw new Error(`Too many tokens in function calls: ${functionsNumTokens} > ${maxTokens}`);\n }\n maxTokens -= functionsNumTokens;\n\n for (const message of messages) {\n if (message.role === \"system\") {\n const numTokens = this.countTokens(message);\n maxTokens -= numTokens;\n\n if (maxTokens < 0) {\n throw new Error(\"Not enough tokens for system message.\");\n }\n }\n }\n\n const result: Message[] = [];\n let cutoff: boolean = false;\n\n const reversedMessages = [...messages].reverse();\n for (const message of reversedMessages) {\n if (message.role 
=== \"system\") {\n result.unshift(message);\n continue;\n } else if (cutoff) {\n continue;\n }\n let numTokens = this.countTokens(message);\n if (maxTokens < numTokens) {\n cutoff = true;\n continue;\n }\n result.unshift(message);\n maxTokens -= numTokens;\n }\n\n return result;\n }\n\n private async runPrompt(params: ChatCompletionTransportFetchParams): Promise<void> {\n this.chatCompletionTransport = new ChatCompletionTransport({\n url: this.url,\n });\n\n this.chatCompletionTransport.on(\"data\", this.onData);\n this.chatCompletionTransport.on(\"error\", this.onError);\n this.chatCompletionTransport.on(\"end\", this.onEnd);\n\n await this.chatCompletionTransport.fetch(params);\n }\n\n private onData = (data: ChatCompletionChunk) => {\n // In case we are in a function call but the next message is not a function call, flush it.\n if (this.mode === \"function\" && !data.choices[0].delta.function_call) {\n const success = this.tryFlushFunctionCall();\n if (!success) {\n return;\n }\n }\n\n this.mode = data.choices[0].delta.function_call ? 
\"function\" : \"message\";\n\n if (this.mode === \"message\") {\n // if we get a message, emit the content and return;\n\n if (data.choices[0].delta.content) {\n this.emit(\"content\", data.choices[0].delta.content);\n }\n\n return;\n } else if (this.mode === \"function\") {\n // if we get a function call, we buffer the name and arguments, then emit a partial event.\n\n if (data.choices[0].delta.function_call!.name) {\n this.functionCallName = data.choices[0].delta.function_call!.name!;\n }\n if (data.choices[0].delta.function_call!.arguments) {\n this.functionCallArguments += data.choices[0].delta.function_call!.arguments!;\n }\n this.emit(\"partial\", this.functionCallName, this.functionCallArguments);\n\n return;\n }\n };\n\n private onError = (error: any) => {\n this.emit(\"error\", error);\n this.cleanup();\n };\n\n private onEnd = () => {\n if (this.mode === \"function\") {\n const success = this.tryFlushFunctionCall();\n if (!success) {\n return;\n }\n }\n this.emit(\"end\");\n this.cleanup();\n };\n\n private tryFlushFunctionCall(): boolean {\n let args: any = null;\n try {\n args = JSON.parse(this.functionCallArguments);\n } catch (error) {\n this.emit(\"error\", error);\n this.cleanup();\n return false;\n }\n this.emit(\"function\", {\n name: this.functionCallName,\n arguments: args,\n });\n this.mode = null;\n this.functionCallName = \"\";\n this.functionCallArguments = \"\";\n return true;\n }\n\n private cleanup() {\n if (this.chatCompletionTransport) {\n this.chatCompletionTransport.off(\"data\", this.onData);\n this.chatCompletionTransport.off(\"error\", this.onError);\n this.chatCompletionTransport.off(\"end\", this.onEnd);\n }\n this.chatCompletionTransport = null;\n this.mode = null;\n this.functionCallName = \"\";\n this.functionCallArguments = \"\";\n }\n\n public countTokens(message: Message): number {\n if (message.content) {\n return estimateTokens(message.content);\n } else if (message.function_call) {\n return 
estimateTokens(JSON.stringify(message.function_call));\n }\n return 0;\n }\n}\n\nconst maxTokensByModel: { [key: string]: number } = {\n \"gpt-3.5-turbo\": 4097,\n \"gpt-3.5-turbo-16k\": 16385,\n \"gpt-4\": 8192,\n \"gpt-4-1106-preview\": 8192,\n \"gpt-4-32k\": 32768,\n \"gpt-3.5-turbo-0301\": 4097,\n \"gpt-4-0314\": 8192,\n \"gpt-4-32k-0314\": 32768,\n \"gpt-3.5-turbo-0613\": 4097,\n \"gpt-4-0613\": 8192,\n \"gpt-4-32k-0613\": 32768,\n \"gpt-3.5-turbo-16k-0613\": 16385,\n};\n\nfunction estimateTokens(text: string): number {\n return text.length / 3;\n}\n\nfunction countFunctionsTokens(functions: Function[]): number {\n if (functions.length === 0) {\n return 0;\n }\n const json = JSON.stringify(functions);\n return estimateTokens(json);\n}\n","import EventEmitter from \"eventemitter3\";\nimport { Message, Function } from \"../types\";\nimport { CopilotApiConfig } from \"../context\";\n\nexport interface ChatCompletionTransportConfiguration {\n url: string;\n}\n\ninterface ChatCompletionTransportEvents {\n end: void;\n data: any;\n error: any;\n}\n\nexport interface ChatCompletionTransportFetchParams {\n model?: string;\n messages: Message[];\n functions?: Function[];\n temperature?: number;\n maxTokens?: number;\n headers?: Record<string, string> | Headers;\n body?: object;\n copilotConfig: CopilotApiConfig;\n signal?: AbortSignal;\n}\n\nconst DEFAULT_MODEL = \"gpt-4-1106-preview\";\n\nexport class ChatCompletionTransport extends EventEmitter<ChatCompletionTransportEvents> {\n private buffer = new Uint8Array();\n private bodyReader: ReadableStreamDefaultReader<Uint8Array> | null = null;\n private url: string;\n\n constructor({ url }: ChatCompletionTransportConfiguration) {\n super();\n this.url = url;\n }\n\n private async cleanup() {\n if (this.bodyReader) {\n try {\n await this.bodyReader.cancel();\n } catch (error) {\n console.warn(\"Failed to cancel body reader:\", error);\n }\n }\n this.bodyReader = null;\n this.buffer = new Uint8Array();\n }\n\n public async 
fetch({\n model,\n messages,\n copilotConfig,\n functions,\n temperature,\n headers,\n body,\n signal,\n }: ChatCompletionTransportFetchParams): Promise<void> {\n await this.cleanup();\n\n temperature ||= 0.5;\n functions ||= [];\n model ||= DEFAULT_MODEL;\n\n // clean up any extra properties from messages\n const cleanedMessages = messages.map((message) => {\n const { content, role, name, function_call } = message;\n return { content, role, name, function_call };\n });\n\n try {\n const response = await fetch(this.url, {\n method: \"POST\",\n headers: {\n \"Content-Type\": \"application/json\",\n ...copilotConfig.headers,\n ...(headers ? { ...headers } : {}),\n },\n body: JSON.stringify({\n model,\n messages: cleanedMessages,\n stream: true,\n ...(functions.length ? { functions } : {}),\n ...(temperature ? { temperature } : {}),\n ...(functions.length != 0 ? { function_call: \"auto\" } : {}),\n ...copilotConfig.body,\n ...(body ? { ...body } : {}),\n }),\n signal,\n });\n\n if (!response.ok) {\n try {\n const errorText = await response.text();\n await this.cleanup();\n const msg = `Error ${response.status}: ${errorText}`;\n this.emit(\"error\", new Error(msg));\n } catch (_error) {\n await this.cleanup();\n const msg = `Error ${response.status}: ${response.statusText}`;\n this.emit(\"error\", new Error(msg));\n }\n return;\n }\n\n if (response.body == null) {\n await this.cleanup();\n const msg = \"Response body is null\";\n this.emit(\"error\", new Error(msg));\n return;\n }\n\n this.bodyReader = response.body.getReader();\n\n await this.streamBody();\n } catch (error) {\n await this.cleanup();\n this.emit(\"error\", error);\n return;\n }\n }\n\n private async streamBody() {\n while (true) {\n try {\n const { done, value } = await this.bodyReader!.read();\n\n if (done) {\n await this.cleanup();\n this.emit(\"end\");\n return;\n }\n\n const shouldContinue = await this.processData(value);\n\n if (!shouldContinue) {\n return;\n }\n } catch (error) {\n await 
this.cleanup();\n this.emit(\"error\", error);\n return;\n }\n }\n }\n\n private async processData(data: Uint8Array): Promise<boolean> {\n // Append new data to the temp buffer\n const newBuffer = new Uint8Array(this.buffer.length + data.length);\n newBuffer.set(this.buffer);\n newBuffer.set(data, this.buffer.length);\n this.buffer = newBuffer;\n\n const dataString = new TextDecoder(\"utf-8\").decode(this.buffer);\n\n let lines = dataString.split(\"\\n\").filter((line) => line.trim() !== \"\");\n\n // If the last line isn't complete, keep it in the buffer for next time\n if (!dataString.endsWith(\"\\n\")) {\n const lastLine = lines.pop() || \"\";\n const remainingBytes = new TextEncoder().encode(lastLine);\n this.buffer = new Uint8Array(remainingBytes);\n } else {\n this.buffer = new Uint8Array();\n }\n\n for (const line of lines) {\n const cleanedLine = line.replace(/^data: /, \"\");\n\n if (cleanedLine === \"[DONE]\") {\n await this.cleanup();\n this.emit(\"end\");\n return false;\n }\n\n let json;\n try {\n json = JSON.parse(cleanedLine);\n } catch (error) {\n console.error(\"Failed to parse JSON:\", error);\n continue;\n }\n\n this.emit(\"data\", json);\n }\n return true;\n }\n}\n","\"use client\";\n\nimport { useCallback, useState } from \"react\";\nimport { CopilotContext, CopilotApiConfig } from \"../../context/copilot-context\";\nimport useTree from \"../../hooks/use-tree\";\nimport { AnnotatedFunction } from \"../../types/annotated-function\";\nimport { ChatCompletionCreateParams } from \"openai/resources/chat\";\nimport { DocumentPointer, FunctionCallHandler } from \"../../types\";\nimport useFlatCategoryStore from \"../../hooks/use-flat-category-store\";\nimport { StandardCopilotApiConfig } from \"./standard-copilot-api-config\";\nimport { CopilotProviderProps } from \"./copilot-provider-props\";\n\n/**\n * The CopilotProvider component.\n * This component provides the Copilot context to its children.\n * It can be configured either with a chat API 
endpoint or a CopilotApiConfig.\n *\n * NOTE: The backend can use OpenAI, or you can bring your own LLM.\n * For examples of the backend api implementation, see `examples/next-openai` usage (under `src/api/copilotkit`),\n * or read the documentation at https://docs.copilotkit.ai\n * In particular, Getting-Started > Quickstart-Backend: https://docs.copilotkit.ai/getting-started/quickstart-backend\n *\n * Example usage:\n * ```\n * <CopilotProvider chatApiEndpoint=\"https://your.copilotkit.api\">\n * <App />\n * </CopilotProvider>\n * ```\n *\n * or\n *\n * ```\n * const copilotApiConfig = new StandardCopilotApiConfig(\n * \"https://your.copilotkit.api/v1\",\n * \"https://your.copilotkit.api/v2\",\n * {},\n * {}\n * );\n *\n * // ...\n *\n * <CopilotProvider chatApiConfig={copilotApiConfig}>\n * <App />\n * </CopilotProvider>\n * ```\n *\n * @param props - The props for the component.\n * @returns The CopilotProvider component.\n */\nexport function CopilotProvider({ children, ...props }: CopilotProviderProps): JSX.Element {\n // Compute all the functions and properties that we need to pass\n // to the CopilotContext.\n\n const [entryPoints, setEntryPoints] = useState<Record<string, AnnotatedFunction<any[]>>>({});\n\n const { addElement, removeElement, printTree } = useTree();\n\n const {\n addElement: addDocument,\n removeElement: removeDocument,\n allElements: allDocuments,\n } = useFlatCategoryStore<DocumentPointer>();\n\n const setEntryPoint = useCallback((id: string, entryPoint: AnnotatedFunction<any[]>) => {\n setEntryPoints((prevPoints) => {\n return {\n ...prevPoints,\n [id]: entryPoint,\n };\n });\n }, []);\n\n const removeEntryPoint = useCallback((id: string) => {\n setEntryPoints((prevPoints) => {\n const newPoints = { ...prevPoints };\n delete newPoints[id];\n return newPoints;\n });\n }, []);\n\n const getContextString = useCallback(\n (documents: DocumentPointer[], categories: string[]) => {\n const documentsString = documents\n .map((document) => {\n 
return `${document.name} (${document.sourceApplication}):\\n${document.getContents()}`;\n })\n .join(\"\\n\\n\");\n\n const nonDocumentStrings = printTree(categories);\n\n return `${documentsString}\\n\\n${nonDocumentStrings}`;\n },\n [printTree],\n );\n\n const addContext = useCallback(\n (\n context: string,\n parentId?: string,\n categories: string[] = defaultCopilotContextCategories,\n ) => {\n return addElement(context, categories, parentId);\n },\n [addElement],\n );\n\n const removeContext = useCallback(\n (id: string) => {\n removeElement(id);\n },\n [removeElement],\n );\n\n const getChatCompletionFunctionDescriptions = useCallback(() => {\n return entryPointsToChatCompletionFunctions(Object.values(entryPoints));\n }, [entryPoints]);\n\n const getFunctionCallHandler = useCallback(() => {\n return entryPointsToFunctionCallHandler(Object.values(entryPoints));\n }, [entryPoints]);\n\n const getDocumentsContext = useCallback(\n (categories: string[]) => {\n return allDocuments(categories);\n },\n [allDocuments],\n );\n\n const addDocumentContext = useCallback(\n (documentPointer: DocumentPointer, categories: string[] = defaultCopilotContextCategories) => {\n return addDocument(documentPointer, categories);\n },\n [addDocument],\n );\n\n const removeDocumentContext = useCallback(\n (documentId: string) => {\n removeDocument(documentId);\n },\n [removeDocument],\n );\n\n // get the appropriate CopilotApiConfig from the props\n let copilotApiConfig: CopilotApiConfig;\n if (\"chatApiEndpoint\" in props) {\n copilotApiConfig = new StandardCopilotApiConfig(\n props.chatApiEndpoint,\n props.chatApiEndpointV2 || `${props.chatApiEndpoint}/v2`,\n {},\n {},\n );\n } else {\n copilotApiConfig = props.chatApiConfig;\n }\n\n return (\n <CopilotContext.Provider\n value={{\n entryPoints,\n getChatCompletionFunctionDescriptions,\n getFunctionCallHandler,\n setEntryPoint,\n removeEntryPoint,\n getContextString,\n addContext,\n removeContext,\n getDocumentsContext,\n 
addDocumentContext,\n removeDocumentContext,\n copilotApiConfig: copilotApiConfig,\n }}\n >\n {children}\n </CopilotContext.Provider>\n );\n}\n\nexport const defaultCopilotContextCategories = [\"global\"];\n\nfunction entryPointsToFunctionCallHandler(\n entryPoints: AnnotatedFunction<any[]>[],\n): FunctionCallHandler {\n return async (chatMessages, functionCall) => {\n let entrypointsByFunctionName: Record<string, AnnotatedFunction<any[]>> = {};\n for (let entryPoint of entryPoints) {\n entrypointsByFunctionName[entryPoint.name] = entryPoint;\n }\n\n const entryPointFunction = entrypointsByFunctionName[functionCall.name || \"\"];\n if (entryPointFunction) {\n let functionCallArguments: Record<string, any>[] = [];\n if (functionCall.arguments) {\n functionCallArguments = JSON.parse(functionCall.arguments);\n }\n\n const paramsInCorrectOrder: any[] = [];\n for (let arg of entryPointFunction.argumentAnnotations) {\n paramsInCorrectOrder.push(\n functionCallArguments[arg.name as keyof typeof functionCallArguments],\n );\n }\n\n await entryPointFunction.implementation(...paramsInCorrectOrder);\n\n // commented out becasue for now we don't want to return anything\n // const result = await entryPointFunction.implementation(\n // ...parsedFunctionCallArguments\n // );\n // const functionResponse: ChatRequest = {\n // messages: [\n // ...chatMessages,\n // {\n // id: nanoid(),\n // name: functionCall.name,\n // role: 'function' as const,\n // content: JSON.stringify(result),\n // },\n // ],\n // };\n\n // return functionResponse;\n }\n };\n}\n\nfunction entryPointsToChatCompletionFunctions(\n entryPoints: AnnotatedFunction<any[]>[],\n): ChatCompletionCreateParams.Function[] {\n return entryPoints.map(annotatedFunctionToChatCompletionFunction);\n}\n\nfunction annotatedFunctionToChatCompletionFunction(\n annotatedFunction: AnnotatedFunction<any[]>,\n): ChatCompletionCreateParams.Function {\n // Create the parameters object based on the argumentAnnotations\n let parameters: { 
[key: string]: any } = {};\n for (let arg of annotatedFunction.argumentAnnotations) {\n // isolate the args we should forward inline\n let { name, required, ...forwardedArgs } = arg;\n parameters[arg.name] = forwardedArgs;\n }\n\n let requiredParameterNames: string[] = [];\n for (let arg of annotatedFunction.argumentAnnotations) {\n if (arg.required) {\n requiredParameterNames.push(arg.name);\n }\n }\n\n // Create the ChatCompletionFunctions object\n let chatCompletionFunction: ChatCompletionCreateParams.Function = {\n name: annotatedFunction.name,\n description: annotatedFunction.description,\n parameters: {\n type: \"object\",\n properties: parameters,\n required: requiredParameterNames,\n },\n };\n\n return chatCompletionFunction;\n}\n","\"use client\";\n\nimport { useRef, useContext, useEffect, useMemo } from \"react\";\nimport { CopilotContext } from \"../context/copilot-context\";\nimport { AnnotatedFunction } from \"../types/annotated-function\";\nimport { nanoid } from \"nanoid\";\n\nexport function useMakeCopilotActionable<ActionInput extends any[]>(\n annotatedFunction: AnnotatedFunction<ActionInput>,\n dependencies: any[],\n) {\n const idRef = useRef(nanoid()); // generate a unique id\n const { setEntryPoint, removeEntryPoint } = useContext(CopilotContext);\n\n const memoizedAnnotatedFunction: AnnotatedFunction<ActionInput> = useMemo(\n () => ({\n name: annotatedFunction.name,\n description: annotatedFunction.description,\n argumentAnnotations: annotatedFunction.argumentAnnotations,\n implementation: annotatedFunction.implementation,\n }),\n dependencies,\n );\n\n useEffect(() => {\n setEntryPoint(idRef.current, memoizedAnnotatedFunction as AnnotatedFunction<any[]>);\n\n return () => {\n removeEntryPoint(idRef.current);\n };\n }, [memoizedAnnotatedFunction, setEntryPoint, removeEntryPoint]);\n}\n","\"use client\";\n\nimport { useContext, useEffect, useRef } from \"react\";\nimport { CopilotContext } from \"../context/copilot-context\";\n\n/**\n * Adds 
the given information to the Copilot context to make it readable by Copilot.\n * @param information - The information to be added to the Copilot context.\n * @param parentId - The ID of the parent context, if any.\n * @param categories - An array of categories to control which context are visible where. Particularly useful with CopilotTextarea (see `useMakeAutosuggestionFunction`)\n * @returns The ID of the added context.\n */\nexport function useMakeCopilotReadable(\n information: string,\n parentId?: string,\n categories?: string[],\n): string | undefined {\n const { addContext, removeContext } = useContext(CopilotContext);\n const idRef = useRef<string>();\n\n useEffect(() => {\n const id = addContext(information, parentId, categories);\n idRef.current = id;\n\n return () => {\n removeContext(id);\n };\n }, [information, parentId, addContext, removeContext]);\n\n return idRef.current;\n}\n","\"use client\";\n\nimport { useContext, useEffect, useRef } from \"react\";\nimport { CopilotContext } from \"../context/copilot-context\";\nimport { DocumentPointer } from \"../types\";\n\n/**\n * Makes a document readable by Copilot.\n * @param document The document to make readable.\n * @param categories The categories to associate with the document.\n * @param dependencies The dependencies to use for the effect.\n * @returns The id of the document.\n */\nexport function useMakeCopilotDocumentReadable(\n document: DocumentPointer,\n categories?: string[],\n dependencies: any[] = [],\n): string | undefined {\n const { addDocumentContext, removeDocumentContext } = useContext(CopilotContext);\n const idRef = useRef<string>();\n\n useEffect(() => {\n const id = addDocumentContext(document, categories);\n idRef.current = id;\n\n return () => {\n removeDocumentContext(id);\n };\n }, [addDocumentContext, removeDocumentContext, ...dependencies]);\n\n return 
idRef.current;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,SAAS,SAAS,kBAAkB;;;ACGpC,OAAO,WAAW;AAmElB,IAAM,sBAA4C;AAAA,EAChD,aAAa,CAAC;AAAA,EACd,eAAe,MAAM;AAAA,EAAC;AAAA,EACtB,kBAAkB,MAAM;AAAA,EAAC;AAAA,EACzB,uCAAuC,MAAM,sBAAsB,CAAC,CAAC;AAAA,EACrE,wBAAwB,MAAM,sBAAsB,MAAY;AAAA,EAAC,EAAC;AAAA,EAElE,kBAAkB,CAAC,WAA8B,eAC/C,sBAAsB,EAAE;AAAA,EAC1B,YAAY,MAAM;AAAA,EAClB,eAAe,MAAM;AAAA,EAAC;AAAA,EAEtB,qBAAqB,CAAC,eAAyB,sBAAsB,CAAC,CAAC;AAAA,EACvE,oBAAoB,MAAM,sBAAsB,EAAE;AAAA,EAClD,uBAAuB,MAAM;AAAA,EAAC;AAAA,EAE9B,kBAAkB,IAAK,MAAkC;AAAA,IACvD,IAAI,kBAA0B;AAC5B,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,IACA,IAAI,oBAA4B;AAC9B,YAAM,IAAI;AAAA,QACR;AAAA,MACF;AAAA,IACF;AAAA,IACA,IAAI,UAAkC;AACpC,aAAO,CAAC;AAAA,IACV;AAAA,IACA,IAAI,OAA4B;AAC9B,aAAO,CAAC;AAAA,IACV;AAAA,EACF,EAAG;AACL;AAEO,IAAM,iBAAiB,MAAM,cAAoC,mBAAmB;AAE3F,SAAS,sBAAyB,OAAa;AAC7C,QAAM,IAAI;AAAA,IACR;AAAA,EACF;AACA,SAAO;AACT;;;ACjHA,SAAS,gBAAgB;AAEzB,SAAS,cAAc;;;ACFvB,OAAOA,mBAAkB;;;ACAzB,OAAO,kBAAkB;AA0BzB,IAAM,gBAAgB;AAEf,IAAM,0BAAN,cAAsC,aAA4C;AAAA,EAKvF,YAAY,EAAE,IAAI,GAAyC;AACzD,UAAM;AALR,SAAQ,SAAS,IAAI,WAAW;AAChC,SAAQ,aAA6D;AAKnE,SAAK,MAAM;AAAA,EACb;AAAA,EAEc,UAAU;AAAA;AACtB,UAAI,KAAK,YAAY;AACnB,YAAI;AACF,gBAAM,KAAK,WAAW,OAAO;AAAA,QAC/B,SAAS,OAAP;AACA,kBAAQ,KAAK,iCAAiC,KAAK;AAAA,QACrD;AAAA,MACF;AACA,WAAK,aAAa;AAClB,WAAK,SAAS,IAAI,WAAW;AAAA,IAC/B;AAAA;AAAA,EAEa,MAAM,IASmC;AAAA,+CATnC;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF,GAAsD;AACpD,YAAM,KAAK,QAAQ;AAEnB,oCAAgB;AAChB,gCAAc,CAAC;AACf,wBAAU;AAGV,YAAM,kBAAkB,SAAS,IAAI,CAAC,YAAY;AAChD,cAAM,EAAE,SAAS,MAAM,MAAM,cAAc,IAAI;AAC/C,eAAO,EAAE,SAAS,MAAM,MAAM,cAAc;AAAA,MAC9C,CAAC;AAED,UAAI;AACF,cAAM,WAAW,MAAM,MAAM,KAAK,KAAK;AAAA,UACrC,QAAQ;AAAA,UACR,SAAS;AAAA,YACP,gBAAgB;AAAA,aACb,cAAc,UACb,UAAU,mBAAK,WAAY,CAAC;AAAA,UAElC,MAAM,KAAK,UAAU;AAAA,YACnB;AAAA,YACA,UAAU;AAAA,YACV,QAAQ;AAAA,aACJ,UAAU,SAAS,EAAE,UAAU,IAAI,CAAC,IACpC,cAAc,EAAE,YAAY,IAAI,CAAC,IACjC,UAAU,UAAU,IAAI,EAAE,eAAe,OAAO,IAAI,CAA
C,IACtD,cAAc,OACb,OAAO,mBAAK,QAAS,CAAC,EAC3B;AAAA,UACD;AAAA,QACF,CAAC;AAED,YAAI,CAAC,SAAS,IAAI;AAChB,cAAI;AACF,kBAAM,YAAY,MAAM,SAAS,KAAK;AACtC,kBAAM,KAAK,QAAQ;AACnB,kBAAM,MAAM,SAAS,SAAS,WAAW;AACzC,iBAAK,KAAK,SAAS,IAAI,MAAM,GAAG,CAAC;AAAA,UACnC,SAAS,QAAP;AACA,kBAAM,KAAK,QAAQ;AACnB,kBAAM,MAAM,SAAS,SAAS,WAAW,SAAS;AAClD,iBAAK,KAAK,SAAS,IAAI,MAAM,GAAG,CAAC;AAAA,UACnC;AACA;AAAA,QACF;AAEA,YAAI,SAAS,QAAQ,MAAM;AACzB,gBAAM,KAAK,QAAQ;AACnB,gBAAM,MAAM;AACZ,eAAK,KAAK,SAAS,IAAI,MAAM,GAAG,CAAC;AACjC;AAAA,QACF;AAEA,aAAK,aAAa,SAAS,KAAK,UAAU;AAE1C,cAAM,KAAK,WAAW;AAAA,MACxB,SAAS,OAAP;AACA,cAAM,KAAK,QAAQ;AACnB,aAAK,KAAK,SAAS,KAAK;AACxB;AAAA,MACF;AAAA,IACF;AAAA;AAAA,EAEc,aAAa;AAAA;AACzB,aAAO,MAAM;AACX,YAAI;AACF,gBAAM,EAAE,MAAM,MAAM,IAAI,MAAM,KAAK,WAAY,KAAK;AAEpD,cAAI,MAAM;AACR,kBAAM,KAAK,QAAQ;AACnB,iBAAK,KAAK,KAAK;AACf;AAAA,UACF;AAEA,gBAAM,iBAAiB,MAAM,KAAK,YAAY,KAAK;AAEnD,cAAI,CAAC,gBAAgB;AACnB;AAAA,UACF;AAAA,QACF,SAAS,OAAP;AACA,gBAAM,KAAK,QAAQ;AACnB,eAAK,KAAK,SAAS,KAAK;AACxB;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA;AAAA,EAEc,YAAY,MAAoC;AAAA;AAE5D,YAAM,YAAY,IAAI,WAAW,KAAK,OAAO,SAAS,KAAK,MAAM;AACjE,gBAAU,IAAI,KAAK,MAAM;AACzB,gBAAU,IAAI,MAAM,KAAK,OAAO,MAAM;AACtC,WAAK,SAAS;AAEd,YAAM,aAAa,IAAI,YAAY,OAAO,EAAE,OAAO,KAAK,MAAM;AAE9D,UAAI,QAAQ,WAAW,MAAM,IAAI,EAAE,OAAO,CAAC,SAAS,KAAK,KAAK,MAAM,EAAE;AAGtE,UAAI,CAAC,WAAW,SAAS,IAAI,GAAG;AAC9B,cAAM,WAAW,MAAM,IAAI,KAAK;AAChC,cAAM,iBAAiB,IAAI,YAAY,EAAE,OAAO,QAAQ;AACxD,aAAK,SAAS,IAAI,WAAW,cAAc;AAAA,MAC7C,OAAO;AACL,aAAK,SAAS,IAAI,WAAW;AAAA,MAC/B;AAEA,iBAAW,QAAQ,OAAO;AACxB,cAAM,cAAc,KAAK,QAAQ,WAAW,EAAE;AAE9C,YAAI,gBAAgB,UAAU;AAC5B,gBAAM,KAAK,QAAQ;AACnB,eAAK,KAAK,KAAK;AACf,iBAAO;AAAA,QACT;AAEA,YAAI;AACJ,YAAI;AACF,iBAAO,KAAK,MAAM,WAAW;AAAA,QAC/B,SAAS,OAAP;AACA,kBAAQ,MAAM,yBAAyB,KAAK;AAC5C;AAAA,QACF;AAEA,aAAK,KAAK,QAAQ,IAAI;AAAA,MACxB;AACA,aAAO;AAAA,IACT;AAAA;AACF;;;ADzJA,IAAM,qBAAqB;AAEpB,IAAM,uBAAN,cAAmCC,cAAyC;AAAA,EASjF,YAAY,QAA2C;AACrD,UAAM;AANR,SAAQ,0BAA0D;AAClE,SAAQ,OAAsC;AAC9C,SAAQ,mBAA2B;AACnC,SAAQ,wBAAgC;AA8ExC,SAAQ,SAAS,CAAC,SAA8B;AAE9C,UAAI,KAAK,SAAS,c
AAc,CAAC,KAAK,QAAQ,CAAC,EAAE,MAAM,eAAe;AACpE,cAAM,UAAU,KAAK,qBAAqB;AAC1C,YAAI,CAAC,SAAS;AACZ;AAAA,QACF;AAAA,MACF;AAEA,WAAK,OAAO,KAAK,QAAQ,CAAC,EAAE,MAAM,gBAAgB,aAAa;AAE/D,UAAI,KAAK,SAAS,WAAW;AAG3B,YAAI,KAAK,QAAQ,CAAC,EAAE,MAAM,SAAS;AACjC,eAAK,KAAK,WAAW,KAAK,QAAQ,CAAC,EAAE,MAAM,OAAO;AAAA,QACpD;AAEA;AAAA,MACF,WAAW,KAAK,SAAS,YAAY;AAGnC,YAAI,KAAK,QAAQ,CAAC,EAAE,MAAM,cAAe,MAAM;AAC7C,eAAK,mBAAmB,KAAK,QAAQ,CAAC,EAAE,MAAM,cAAe;AAAA,QAC/D;AACA,YAAI,KAAK,QAAQ,CAAC,EAAE,MAAM,cAAe,WAAW;AAClD,eAAK,yBAAyB,KAAK,QAAQ,CAAC,EAAE,MAAM,cAAe;AAAA,QACrE;AACA,aAAK,KAAK,WAAW,KAAK,kBAAkB,KAAK,qBAAqB;AAEtE;AAAA,MACF;AAAA,IACF;AAEA,SAAQ,UAAU,CAAC,UAAe;AAChC,WAAK,KAAK,SAAS,KAAK;AACxB,WAAK,QAAQ;AAAA,IACf;AAEA,SAAQ,QAAQ,MAAM;AACpB,UAAI,KAAK,SAAS,YAAY;AAC5B,cAAM,UAAU,KAAK,qBAAqB;AAC1C,YAAI,CAAC,SAAS;AACZ;AAAA,QACF;AAAA,MACF;AACA,WAAK,KAAK,KAAK;AACf,WAAK,QAAQ;AAAA,IACf;AA1HE,SAAK,QAAQ,OAAO;AACpB,SAAK,MAAM,OAAO;AAAA,EACpB;AAAA,EAEa,MAAM,QAA4C;AAAA;AAC7D,eAAS,mBAAK;AACd,UAAI,KAAK,SAAS,KAAK,SAAS,kBAAkB;AAChD,eAAO,cAAP,OAAO,YAAc,iBAAiB,KAAK,KAAK;AAAA,MAClD,OAAO;AACL,eAAO,cAAP,OAAO,YAAc;AAAA,MACvB;AAEA,aAAO,cAAP,OAAO,YAAc,CAAC;AACtB,aAAO,QAAQ,KAAK;AACpB,aAAO,WAAW,KAAK,YAAY,MAAM;AACzC,aAAO,MAAM,KAAK,UAAU,MAAM;AAAA,IACpC;AAAA;AAAA,EAEQ,YAAY,QAAuD;AACzE,QAAI,YAAY,OAAO;AACvB,UAAM,WAAW,OAAO;AACxB,UAAM,YAAY,OAAO;AACzB,UAAM,qBAAqB,qBAAqB,SAAS;AACzD,QAAI,qBAAqB,WAAW;AAClC,YAAM,IAAI,MAAM,sCAAsC,wBAAwB,WAAW;AAAA,IAC3F;AACA,iBAAa;AAEb,eAAW,WAAW,UAAU;AAC9B,UAAI,QAAQ,SAAS,UAAU;AAC7B,cAAM,YAAY,KAAK,YAAY,OAAO;AAC1C,qBAAa;AAEb,YAAI,YAAY,GAAG;AACjB,gBAAM,IAAI,MAAM,uCAAuC;AAAA,QACzD;AAAA,MACF;AAAA,IACF;AAEA,UAAM,SAAoB,CAAC;AAC3B,QAAI,SAAkB;AAEtB,UAAM,mBAAmB,CAAC,GAAG,QAAQ,EAAE,QAAQ;AAC/C,eAAW,WAAW,kBAAkB;AACtC,UAAI,QAAQ,SAAS,UAAU;AAC7B,eAAO,QAAQ,OAAO;AACtB;AAAA,MACF,WAAW,QAAQ;AACjB;AAAA,MACF;AACA,UAAI,YAAY,KAAK,YAAY,OAAO;AACxC,UAAI,YAAY,WAAW;AACzB,iBAAS;AACT;AAAA,MACF;AACA,aAAO,QAAQ,OAAO;AACtB,mBAAa;AAAA,IACf;AAEA,WAAO;AAAA,EACT;AAAA,EAEc,UAAU,QAA2D;AAAA;AACjF,WAAK,0BAA0B,IAAI,wBAAwB;AAAA,QACzD,KAAK,KAAK;AAAA,MACZ,CAAC;
AAED,WAAK,wBAAwB,GAAG,QAAQ,KAAK,MAAM;AACnD,WAAK,wBAAwB,GAAG,SAAS,KAAK,OAAO;AACrD,WAAK,wBAAwB,GAAG,OAAO,KAAK,KAAK;AAEjD,YAAM,KAAK,wBAAwB,MAAM,MAAM;AAAA,IACjD;AAAA;AAAA,EAoDQ,uBAAgC;AACtC,QAAI,OAAY;AAChB,QAAI;AACF,aAAO,KAAK,MAAM,KAAK,qBAAqB;AAAA,IAC9C,SAAS,OAAP;AACA,WAAK,KAAK,SAAS,KAAK;AACxB,WAAK,QAAQ;AACb,aAAO;AAAA,IACT;AACA,SAAK,KAAK,YAAY;AAAA,MACpB,MAAM,KAAK;AAAA,MACX,WAAW;AAAA,IACb,CAAC;AACD,SAAK,OAAO;AACZ,SAAK,mBAAmB;AACxB,SAAK,wBAAwB;AAC7B,WAAO;AAAA,EACT;AAAA,EAEQ,UAAU;AAChB,QAAI,KAAK,yBAAyB;AAChC,WAAK,wBAAwB,IAAI,QAAQ,KAAK,MAAM;AACpD,WAAK,wBAAwB,IAAI,SAAS,KAAK,OAAO;AACtD,WAAK,wBAAwB,IAAI,OAAO,KAAK,KAAK;AAAA,IACpD;AACA,SAAK,0BAA0B;AAC/B,SAAK,OAAO;AACZ,SAAK,mBAAmB;AACxB,SAAK,wBAAwB;AAAA,EAC/B;AAAA,EAEO,YAAY,SAA0B;AAC3C,QAAI,QAAQ,SAAS;AACnB,aAAO,eAAe,QAAQ,OAAO;AAAA,IACvC,WAAW,QAAQ,eAAe;AAChC,aAAO,eAAe,KAAK,UAAU,QAAQ,aAAa,CAAC;AAAA,IAC7D;AACA,WAAO;AAAA,EACT;AACF;AAEA,IAAM,mBAA8C;AAAA,EAClD,iBAAiB;AAAA,EACjB,qBAAqB;AAAA,EACrB,SAAS;AAAA,EACT,sBAAsB;AAAA,EACtB,aAAa;AAAA,EACb,sBAAsB;AAAA,EACtB,cAAc;AAAA,EACd,kBAAkB;AAAA,EAClB,sBAAsB;AAAA,EACtB,cAAc;AAAA,EACd,kBAAkB;AAAA,EAClB,0BAA0B;AAC5B;AAEA,SAAS,eAAe,MAAsB;AAC5C,SAAO,KAAK,SAAS;AACvB;AAEA,SAAS,qBAAqB,WAA+B;AAC3D,MAAI,UAAU,WAAW,GAAG;AAC1B,WAAO;AAAA,EACT;AACA,QAAM,OAAO,KAAK,UAAU,SAAS;AACrC,SAAO,eAAe,IAAI;AAC5B;;;AD7JO,SAAS,QAAQ,SAA0D;AAChF,QAAM,CAAC,UAAU,WAAW,IAAI,SAAoB,CAAC,CAAC;AACtD,QAAM,CAAC,OAAO,QAAQ,IAAI,SAAS,EAAE;AACrC,QAAM,CAAC,WAAW,YAAY,IAAI,SAAS,KAAK;AAEhD,QAAM,oBAAoB,CAAOC,cAA0C;AACzE,WAAO,IAAI,QAAiB,CAAC,SAAS,WAAW;AAC/C,mBAAa,IAAI;AAEjB,YAAM,mBAA4B;AAAA,QAChC,IAAI,OAAO;AAAA,QACX,WAAW,oBAAI,KAAK;AAAA,QACpB,SAAS;AAAA,QACT,MAAM;AAAA,MACR;AAGA,kBAAY,CAAC,GAAGA,WAAU,mBAAK,iBAAkB,CAAC;AAElD,YAAM,sBAAsB,CAAC,GAAI,QAAQ,mBAAmB,CAAC,GAAI,GAAGA,SAAQ;AAE5E,YAAM,SAAS,IAAI,qBAAqB;AAAA,QACtC,KAAK,QAAQ,OAAO;AAAA,MACtB,CAAC;AAED,YAAM,UAAU,MAAM;AACpB,eAAO,IAAI,SAAS;AACpB,eAAO,IAAI,KAAK;AAChB,eAAO,IAAI,OAAO;AAClB,eAAO,IAAI,UAAU;AAAA,MACvB;AAEA,aAAO,GAAG,WAAW,CAAC,YAAY;AAChC,yBAAiB,WAAW;AAC5B,oBAAY,CAAC,GAAGA,WAAU,mBAAK,iBAAkB
,CAAC;AAAA,MACpD,CAAC;AAED,aAAO,GAAG,OAAO,MAAM;AACrB,qBAAa,KAAK;AAClB,gBAAQ;AACR,gBAAQ,mBAAK,iBAAkB;AAAA,MACjC,CAAC;AAED,aAAO,GAAG,SAAS,CAAC,UAAU;AAC5B,qBAAa,KAAK;AAClB,gBAAQ;AACR,eAAO,KAAK;AAAA,MACd,CAAC;AAED,aAAO,GAAG,YAAY,CAAO,iBAAiB;AAC5C,yBAAiB,gBAAgB;AAAA,UAC/B,MAAM,aAAa;AAAA,UACnB,WAAW,KAAK,UAAU,aAAa,SAAS;AAAA,QAClD;AACA,oBAAY,CAAC,GAAGA,WAAU,mBAAK,iBAAkB,CAAC;AAElD,qBAAa,KAAK;AAClB,gBAAQ;AACR,gBAAQ,mBAAK,iBAAkB;AAAA,MACjC,EAAC;AAED,aAAO,MAAM;AAAA,QACX,UAAU;AAAA,QACV,WAAW,QAAQ;AAAA,QACnB,SAAS,QAAQ;AAAA,QACjB,eAAe,QAAQ;AAAA,MACzB,CAAC;AAAA,IACH,CAAC;AAAA,EACH;AAEA,QAAM,yCAAyC,CAAOA,cAAuC;AAC3F,UAAM,UAAU,MAAM,kBAAkBA,SAAQ;AAChD,QAAI,QAAQ,iBAAiB,QAAQ,gBAAgB;AACnD,YAAM,QAAQ,eAAeA,WAAU,QAAQ,aAAa;AAAA,IAC9D;AAAA,EACF;AAEA,QAAM,SAAS,CAAO,YAAoC;AACxD,QAAI,WAAW;AACb;AAAA,IACF;AACA,UAAM,cAAc,CAAC,GAAG,UAAU,OAAO;AACzC,gBAAY,WAAW;AACvB,WAAO,uCAAuC,WAAW;AAAA,EAC3D;AAEA,QAAM,SAAS,MAA2B;AACxC,QAAI,aAAa,SAAS,WAAW,GAAG;AACtC;AAAA,IACF;AACA,QAAI,cAAc,CAAC,GAAG,QAAQ;AAC9B,UAAM,cAAc,SAAS,SAAS,SAAS,CAAC;AAEhD,QAAI,YAAY,SAAS,aAAa;AACpC,oBAAc,YAAY,MAAM,GAAG,EAAE;AAAA,IACvC;AACA,gBAAY,WAAW;AAEvB,WAAO,uCAAuC,WAAW;AAAA,EAC3D;AAEA,QAAM,OAAO,MAAY;AACvB,UAAM,IAAI,MAAM,iBAAiB;AAAA,EACnC;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;;;AGlMA,SAAS,aAAa,YAAAC,iBAAgB;AA0JlC;AAqBG,IAAM,kCAAkC,CAAC,QAAQ;;;AL5JjD,SAAS,eAAe,IAGiB;AAHjB,eAC7B;AAAA;AAAA,EAtBF,IAqB+B,IAE1B,oBAF0B,IAE1B;AAAA,IADH;AAAA;AAGA,QAAM;AAAA,IACJ;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,IAAI,WAAW,cAAc;AAE7B,QAAM,gBAAyB,QAAQ,MAAM;AAC3C,UAAM,qBAAqB,qBAAqB;AAChD,UAAM,gBAAgB,iBAAiB,CAAC,GAAG,+BAA+B;AAE1E,WAAO;AAAA,MACL,IAAI;AAAA,MACJ,SAAS,mBAAmB,aAAa;AAAA,MACzC,MAAM;AAAA,IACR;AAAA,EACF,GAAG,CAAC,kBAAkB,iBAAiB,CAAC;AAExC,QAAM,uBAA8D,QAAQ,MAAM;AAChF,WAAO,sCAAsC;AAAA,EAC/C,GAAG,CAAC,qCAAqC,CAAC;AAE1C,QAAM,EAAE,UAAU,QAAQ,QAAQ,MAAM,WAAW,OAAO,SAAS,IAAI,QAAQ,iCAC1E,UAD0E;AAAA,IAE7E,eAAe;AAAA,IACf,IAAI,QAAQ;AAAA,IACZ,iBAAiB,CAAC,aAAa,EAAE,OAAO,QAAQ,mBAAmB,CAAC,CAAC;AAAA,IACrE,WAAW;AAAA,IACX,
gBAAgB,uBAAuB;AAAA,IACvC,SAAS,mBAAK,QAAQ;AAAA,IACtB,MAAM,mBACD,QAAQ;AAAA,EAEf,EAAC;AAED,QAAM,kBAAkB,SAAS;AAAA,IAC/B,CAAC,YAAY,QAAQ,SAAS,UAAU,QAAQ,SAAS;AAAA,EAC3D;AAEA,SAAO;AAAA,IACL;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AACF;AAEO,SAAS,qBAAqB,eAA+B;AAClE,SAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQP;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAWF;;;AM7FA,SAAS,QAAQ,cAAAC,aAAY,WAAW,WAAAC,gBAAe;AAGvD,SAAS,UAAAC,eAAc;AAEhB,SAAS,yBACd,mBACA,cACA;AACA,QAAM,QAAQ,OAAOA,QAAO,CAAC;AAC7B,QAAM,EAAE,eAAe,iBAAiB,IAAIC,YAAW,cAAc;AAErE,QAAM,4BAA4DC;AAAA,IAChE,OAAO;AAAA,MACL,MAAM,kBAAkB;AAAA,MACxB,aAAa,kBAAkB;AAAA,MAC/B,qBAAqB,kBAAkB;AAAA,MACvC,gBAAgB,kBAAkB;AAAA,IACpC;AAAA,IACA;AAAA,EACF;AAEA,YAAU,MAAM;AACd,kBAAc,MAAM,SAAS,yBAAqD;AAElF,WAAO,MAAM;AACX,uBAAiB,MAAM,OAAO;AAAA,IAChC;AAAA,EACF,GAAG,CAAC,2BAA2B,eAAe,gBAAgB,CAAC;AACjE;;;AC7BA,SAAS,cAAAC,aAAY,aAAAC,YAAW,UAAAC,eAAc;AAUvC,SAAS,uBACd,aACA,UACA,YACoB;AACpB,QAAM,EAAE,YAAY,cAAc,IAAIC,YAAW,cAAc;AAC/D,QAAM,QAAQC,QAAe;AAE7B,EAAAC,WAAU,MAAM;AACd,UAAM,KAAK,WAAW,aAAa,UAAU,UAAU;AACvD,UAAM,UAAU;AAEhB,WAAO,MAAM;AACX,oBAAc,EAAE;AAAA,IAClB;AAAA,EACF,GAAG,CAAC,aAAa,UAAU,YAAY,aAAa,CAAC;AAErD,SAAO,MAAM;AACf;;;AC5BA,SAAS,cAAAC,aAAY,aAAAC,YAAW,UAAAC,eAAc;AAWvC,SAAS,+BACd,UACA,YACA,eAAsB,CAAC,GACH;AACpB,QAAM,EAAE,oBAAoB,sBAAsB,IAAIC,YAAW,cAAc;AAC/E,QAAM,QAAQC,QAAe;AAE7B,EAAAC,WAAU,MAAM;AACd,UAAM,KAAK,mBAAmB,UAAU,UAAU;AAClD,UAAM,UAAU;AAEhB,WAAO,MAAM;AACX,4BAAsB,EAAE;AAAA,IAC1B;AAAA,EACF,GAAG,CAAC,oBAAoB,uBAAuB,GAAG,YAAY,CAAC;AAE/D,SAAO,MAAM;AACf;","names":["EventEmitter","EventEmitter","messages","useState","useContext","useMemo","nanoid","useContext","useMemo","useContext","useEffect","useRef","useContext","useRef","useEffect","useContext","useEffect","useRef","useContext","useRef","useEffect"]}
@@ -0,0 +1,84 @@
1
+ import { Message, FunctionCallHandler, Function } from '../types/base.js';
2
+ import { CopilotApiConfig } from '../context/copilot-context.js';
3
+ import '../types/annotated-function.js';
4
+ import '../types/document-pointer.js';
5
+ import 'react';
6
+ import './use-tree.js';
7
+ import 'openai/resources/chat';
8
+
9
+ type UseChatOptions = {
10
+ /**
11
+ * The API endpoint that accepts a `{ messages: Message[] }` object and returns
12
+ * a stream of tokens of the AI chat response. Defaults to `/api/chat`.
13
+ */
14
+ api?: string;
15
+ /**
16
+ * A unique identifier for the chat. If not provided, a random one will be
17
+ * generated. When provided, the `useChat` hook with the same `id` will
18
+ * have shared states across components.
19
+ */
20
+ id?: string;
21
+ /**
22
+ * System messages of the chat. Defaults to an empty array.
23
+ */
24
+ initialMessages?: Message[];
25
+ /**
26
+ * Callback function to be called when a function call is received.
27
+ * If the function returns a `ChatRequest` object, the request will be sent
28
+ * automatically to the API and will be used to update the chat.
29
+ */
30
+ onFunctionCall?: FunctionCallHandler;
31
+ /**
32
+ * HTTP headers to be sent with the API request.
33
+ */
34
+ headers?: Record<string, string> | Headers;
35
+ /**
36
+ * Extra body object to be sent with the API request.
37
+ * @example
38
+ * Send a `sessionId` to the API along with the messages.
39
+ * ```js
40
+ * useChat({
41
+ * body: {
42
+ * sessionId: '123',
43
+ * }
44
+ * })
45
+ * ```
46
+ */
47
+ body?: object;
48
+ /**
49
+ * Function definitions to be sent to the API.
50
+ */
51
+ functions?: Function[];
52
+ };
53
+ type UseChatHelpers = {
54
+ /** Current messages in the chat */
55
+ messages: Message[];
56
+ /**
57
+ * Append a user message to the chat list. This triggers the API call to fetch
58
+ * the assistant's response.
59
+ * @param message The message to append
60
+ */
61
+ append: (message: Message) => Promise<void>;
62
+ /**
63
+ * Reload the last AI chat response for the given chat history. If the last
64
+ * message isn't from the assistant, it will request the API to generate a
65
+ * new response.
66
+ */
67
+ reload: () => Promise<void>;
68
+ /**
69
+ * Abort the current request immediately, keep the generated tokens if any.
70
+ */
71
+ stop: () => void;
72
+ /** The current value of the input */
73
+ input: string;
74
+ /** setState-powered method to update the input value */
75
+ setInput: React.Dispatch<React.SetStateAction<string>>;
76
+ /** Whether the API request is in progress */
77
+ isLoading: boolean;
78
+ };
79
+ type UseChatOptionsWithCopilotConfig = UseChatOptions & {
80
+ copilotConfig: CopilotApiConfig;
81
+ };
82
+ declare function useChat(options: UseChatOptionsWithCopilotConfig): UseChatHelpers;
83
+
84
+ export { UseChatHelpers, UseChatOptions, UseChatOptionsWithCopilotConfig, useChat };
@@ -0,0 +1,461 @@
1
+ var __defProp = Object.defineProperty;
2
+ var __getOwnPropSymbols = Object.getOwnPropertySymbols;
3
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
4
+ var __propIsEnum = Object.prototype.propertyIsEnumerable;
5
+ var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
6
+ var __spreadValues = (a, b) => {
7
+ for (var prop in b || (b = {}))
8
+ if (__hasOwnProp.call(b, prop))
9
+ __defNormalProp(a, prop, b[prop]);
10
+ if (__getOwnPropSymbols)
11
+ for (var prop of __getOwnPropSymbols(b)) {
12
+ if (__propIsEnum.call(b, prop))
13
+ __defNormalProp(a, prop, b[prop]);
14
+ }
15
+ return a;
16
+ };
17
+ var __async = (__this, __arguments, generator) => {
18
+ return new Promise((resolve, reject) => {
19
+ var fulfilled = (value) => {
20
+ try {
21
+ step(generator.next(value));
22
+ } catch (e) {
23
+ reject(e);
24
+ }
25
+ };
26
+ var rejected = (value) => {
27
+ try {
28
+ step(generator.throw(value));
29
+ } catch (e) {
30
+ reject(e);
31
+ }
32
+ };
33
+ var step = (x) => x.done ? resolve(x.value) : Promise.resolve(x.value).then(fulfilled, rejected);
34
+ step((generator = generator.apply(__this, __arguments)).next());
35
+ });
36
+ };
37
+
38
+ // src/hooks/use-chat.ts
39
+ import { useState } from "react";
40
+ import { nanoid } from "nanoid";
41
+
42
+ // src/openai/chat-completion-client.ts
43
+ import EventEmitter2 from "eventemitter3";
44
+
45
+ // src/openai/chat-completion-transport.ts
46
+ import EventEmitter from "eventemitter3";
47
+ var DEFAULT_MODEL = "gpt-4-1106-preview";
48
+ var ChatCompletionTransport = class extends EventEmitter {
49
+ constructor({ url }) {
50
+ super();
51
+ this.buffer = new Uint8Array();
52
+ this.bodyReader = null;
53
+ this.url = url;
54
+ }
55
+ cleanup() {
56
+ return __async(this, null, function* () {
57
+ if (this.bodyReader) {
58
+ try {
59
+ yield this.bodyReader.cancel();
60
+ } catch (error) {
61
+ console.warn("Failed to cancel body reader:", error);
62
+ }
63
+ }
64
+ this.bodyReader = null;
65
+ this.buffer = new Uint8Array();
66
+ });
67
+ }
68
+ fetch(_0) {
69
+ return __async(this, arguments, function* ({
70
+ model,
71
+ messages,
72
+ copilotConfig,
73
+ functions,
74
+ temperature,
75
+ headers,
76
+ body,
77
+ signal
78
+ }) {
79
+ yield this.cleanup();
80
+ temperature || (temperature = 0.5);
81
+ functions || (functions = []);
82
+ model || (model = DEFAULT_MODEL);
83
+ const cleanedMessages = messages.map((message) => {
84
+ const { content, role, name, function_call } = message;
85
+ return { content, role, name, function_call };
86
+ });
87
+ try {
88
+ const response = yield fetch(this.url, {
89
+ method: "POST",
90
+ headers: __spreadValues(__spreadValues({
91
+ "Content-Type": "application/json"
92
+ }, copilotConfig.headers), headers ? __spreadValues({}, headers) : {}),
93
+ body: JSON.stringify(__spreadValues(__spreadValues(__spreadValues(__spreadValues(__spreadValues({
94
+ model,
95
+ messages: cleanedMessages,
96
+ stream: true
97
+ }, functions.length ? { functions } : {}), temperature ? { temperature } : {}), functions.length != 0 ? { function_call: "auto" } : {}), copilotConfig.body), body ? __spreadValues({}, body) : {})),
98
+ signal
99
+ });
100
+ if (!response.ok) {
101
+ try {
102
+ const errorText = yield response.text();
103
+ yield this.cleanup();
104
+ const msg = `Error ${response.status}: ${errorText}`;
105
+ this.emit("error", new Error(msg));
106
+ } catch (_error) {
107
+ yield this.cleanup();
108
+ const msg = `Error ${response.status}: ${response.statusText}`;
109
+ this.emit("error", new Error(msg));
110
+ }
111
+ return;
112
+ }
113
+ if (response.body == null) {
114
+ yield this.cleanup();
115
+ const msg = "Response body is null";
116
+ this.emit("error", new Error(msg));
117
+ return;
118
+ }
119
+ this.bodyReader = response.body.getReader();
120
+ yield this.streamBody();
121
+ } catch (error) {
122
+ yield this.cleanup();
123
+ this.emit("error", error);
124
+ return;
125
+ }
126
+ });
127
+ }
128
+ streamBody() {
129
+ return __async(this, null, function* () {
130
+ while (true) {
131
+ try {
132
+ const { done, value } = yield this.bodyReader.read();
133
+ if (done) {
134
+ yield this.cleanup();
135
+ this.emit("end");
136
+ return;
137
+ }
138
+ const shouldContinue = yield this.processData(value);
139
+ if (!shouldContinue) {
140
+ return;
141
+ }
142
+ } catch (error) {
143
+ yield this.cleanup();
144
+ this.emit("error", error);
145
+ return;
146
+ }
147
+ }
148
+ });
149
+ }
150
+ processData(data) {
151
+ return __async(this, null, function* () {
152
+ const newBuffer = new Uint8Array(this.buffer.length + data.length);
153
+ newBuffer.set(this.buffer);
154
+ newBuffer.set(data, this.buffer.length);
155
+ this.buffer = newBuffer;
156
+ const dataString = new TextDecoder("utf-8").decode(this.buffer);
157
+ let lines = dataString.split("\n").filter((line) => line.trim() !== "");
158
+ if (!dataString.endsWith("\n")) {
159
+ const lastLine = lines.pop() || "";
160
+ const remainingBytes = new TextEncoder().encode(lastLine);
161
+ this.buffer = new Uint8Array(remainingBytes);
162
+ } else {
163
+ this.buffer = new Uint8Array();
164
+ }
165
+ for (const line of lines) {
166
+ const cleanedLine = line.replace(/^data: /, "");
167
+ if (cleanedLine === "[DONE]") {
168
+ yield this.cleanup();
169
+ this.emit("end");
170
+ return false;
171
+ }
172
+ let json;
173
+ try {
174
+ json = JSON.parse(cleanedLine);
175
+ } catch (error) {
176
+ console.error("Failed to parse JSON:", error);
177
+ continue;
178
+ }
179
+ this.emit("data", json);
180
+ }
181
+ return true;
182
+ });
183
+ }
184
+ };
185
+
186
+ // src/openai/chat-completion-client.ts
187
+ var DEFAULT_MAX_TOKENS = 8192;
188
+ var ChatCompletionClient = class extends EventEmitter2 {
189
+ constructor(params) {
190
+ super();
191
+ this.chatCompletionTransport = null;
192
+ this.mode = null;
193
+ this.functionCallName = "";
194
+ this.functionCallArguments = "";
195
+ this.onData = (data) => {
196
+ if (this.mode === "function" && !data.choices[0].delta.function_call) {
197
+ const success = this.tryFlushFunctionCall();
198
+ if (!success) {
199
+ return;
200
+ }
201
+ }
202
+ this.mode = data.choices[0].delta.function_call ? "function" : "message";
203
+ if (this.mode === "message") {
204
+ if (data.choices[0].delta.content) {
205
+ this.emit("content", data.choices[0].delta.content);
206
+ }
207
+ return;
208
+ } else if (this.mode === "function") {
209
+ if (data.choices[0].delta.function_call.name) {
210
+ this.functionCallName = data.choices[0].delta.function_call.name;
211
+ }
212
+ if (data.choices[0].delta.function_call.arguments) {
213
+ this.functionCallArguments += data.choices[0].delta.function_call.arguments;
214
+ }
215
+ this.emit("partial", this.functionCallName, this.functionCallArguments);
216
+ return;
217
+ }
218
+ };
219
+ this.onError = (error) => {
220
+ this.emit("error", error);
221
+ this.cleanup();
222
+ };
223
+ this.onEnd = () => {
224
+ if (this.mode === "function") {
225
+ const success = this.tryFlushFunctionCall();
226
+ if (!success) {
227
+ return;
228
+ }
229
+ }
230
+ this.emit("end");
231
+ this.cleanup();
232
+ };
233
+ this.model = params.model;
234
+ this.url = params.url;
235
+ }
236
+ fetch(params) {
237
+ return __async(this, null, function* () {
238
+ params = __spreadValues({}, params);
239
+ if (this.model && this.model in maxTokensByModel) {
240
+ params.maxTokens || (params.maxTokens = maxTokensByModel[this.model]);
241
+ } else {
242
+ params.maxTokens || (params.maxTokens = DEFAULT_MAX_TOKENS);
243
+ }
244
+ params.functions || (params.functions = []);
245
+ params.model = this.model;
246
+ params.messages = this.buildPrompt(params);
247
+ return yield this.runPrompt(params);
248
+ });
249
+ }
250
+ buildPrompt(params) {
251
+ let maxTokens = params.maxTokens;
252
+ const messages = params.messages;
253
+ const functions = params.functions;
254
+ const functionsNumTokens = countFunctionsTokens(functions);
255
+ if (functionsNumTokens > maxTokens) {
256
+ throw new Error(`Too many tokens in function calls: ${functionsNumTokens} > ${maxTokens}`);
257
+ }
258
+ maxTokens -= functionsNumTokens;
259
+ for (const message of messages) {
260
+ if (message.role === "system") {
261
+ const numTokens = this.countTokens(message);
262
+ maxTokens -= numTokens;
263
+ if (maxTokens < 0) {
264
+ throw new Error("Not enough tokens for system message.");
265
+ }
266
+ }
267
+ }
268
+ const result = [];
269
+ let cutoff = false;
270
+ const reversedMessages = [...messages].reverse();
271
+ for (const message of reversedMessages) {
272
+ if (message.role === "system") {
273
+ result.unshift(message);
274
+ continue;
275
+ } else if (cutoff) {
276
+ continue;
277
+ }
278
+ let numTokens = this.countTokens(message);
279
+ if (maxTokens < numTokens) {
280
+ cutoff = true;
281
+ continue;
282
+ }
283
+ result.unshift(message);
284
+ maxTokens -= numTokens;
285
+ }
286
+ return result;
287
+ }
288
+ runPrompt(params) {
289
+ return __async(this, null, function* () {
290
+ this.chatCompletionTransport = new ChatCompletionTransport({
291
+ url: this.url
292
+ });
293
+ this.chatCompletionTransport.on("data", this.onData);
294
+ this.chatCompletionTransport.on("error", this.onError);
295
+ this.chatCompletionTransport.on("end", this.onEnd);
296
+ yield this.chatCompletionTransport.fetch(params);
297
+ });
298
+ }
299
+ tryFlushFunctionCall() {
300
+ let args = null;
301
+ try {
302
+ args = JSON.parse(this.functionCallArguments);
303
+ } catch (error) {
304
+ this.emit("error", error);
305
+ this.cleanup();
306
+ return false;
307
+ }
308
+ this.emit("function", {
309
+ name: this.functionCallName,
310
+ arguments: args
311
+ });
312
+ this.mode = null;
313
+ this.functionCallName = "";
314
+ this.functionCallArguments = "";
315
+ return true;
316
+ }
317
+ cleanup() {
318
+ if (this.chatCompletionTransport) {
319
+ this.chatCompletionTransport.off("data", this.onData);
320
+ this.chatCompletionTransport.off("error", this.onError);
321
+ this.chatCompletionTransport.off("end", this.onEnd);
322
+ }
323
+ this.chatCompletionTransport = null;
324
+ this.mode = null;
325
+ this.functionCallName = "";
326
+ this.functionCallArguments = "";
327
+ }
328
+ countTokens(message) {
329
+ if (message.content) {
330
+ return estimateTokens(message.content);
331
+ } else if (message.function_call) {
332
+ return estimateTokens(JSON.stringify(message.function_call));
333
+ }
334
+ return 0;
335
+ }
336
+ };
337
// Context-window size (in tokens) per OpenAI model, used by buildPrompt()
// to budget how much of the transcript fits in one request. Models not
// listed here fall back to DEFAULT_MAX_TOKENS.
var maxTokensByModel = {
  "gpt-3.5-turbo": 4097,
  "gpt-3.5-turbo-16k": 16385,
  "gpt-4": 8192,
  // gpt-4-1106-preview (GPT-4 Turbo preview) has a 128,000-token context
  // window; the previous value of 8192 needlessly truncated the prompt.
  "gpt-4-1106-preview": 128000,
  "gpt-4-32k": 32768,
  "gpt-3.5-turbo-0301": 4097,
  "gpt-4-0314": 8192,
  "gpt-4-32k-0314": 32768,
  "gpt-3.5-turbo-0613": 4097,
  "gpt-4-0613": 8192,
  "gpt-4-32k-0613": 32768,
  "gpt-3.5-turbo-16k-0613": 16385
};
// Crude token estimate: roughly one token per three characters of text.
function estimateTokens(text) {
  const CHARS_PER_TOKEN = 3;
  return text.length / CHARS_PER_TOKEN;
}

// Estimated token cost of a function-definition list, measured on its
// JSON serialization. An empty list costs nothing.
function countFunctionsTokens(functions) {
  if (functions.length === 0) {
    return 0;
  }
  return estimateTokens(JSON.stringify(functions));
}
+
362
+ // src/hooks/use-chat.ts
363
// React hook managing a chat transcript: holds the message list, the input
// buffer, and a loading flag, and streams assistant replies through a
// ChatCompletionClient.
//
// options fields read below:
//   api             - endpoint URL; defaults to "/api/copilotkit/openai"
//   initialMessages - context messages prepended to every request
//   functions       - function definitions forwarded with the request
//   headers         - extra request headers forwarded to client.fetch
//   copilotConfig   - forwarded to client.fetch
//   onFunctionCall  - async handler invoked when the model returns a function call
function useChat(options) {
  const [messages, setMessages] = useState([]);
  const [input, setInput] = useState("");
  const [isLoading, setIsLoading] = useState(false);
  // Streams one completion for `messages2`; resolves with a copy of the
  // finished assistant message (text reply or function call).
  const runChatCompletion = (messages2) => __async(this, null, function* () {
    return new Promise((resolve, reject) => {
      setIsLoading(true);
      // Mutable accumulator for the streamed reply; shallow copies are
      // pushed into state as chunks arrive so React sees new objects.
      const assistantMessage = {
        id: nanoid(),
        createdAt: /* @__PURE__ */ new Date(),
        content: "",
        role: "assistant"
      };
      setMessages([...messages2, __spreadValues({}, assistantMessage)]);
      const messagesWithContext = [...options.initialMessages || [], ...messages2];
      const client = new ChatCompletionClient({
        url: options.api || "/api/copilotkit/openai"
      });
      // NOTE(review): off() is called without a listener argument here —
      // presumably ChatCompletionClient.off(event) removes all listeners
      // for that event; confirm against its implementation (Node's
      // EventEmitter.off would require the listener function).
      const cleanup = () => {
        client.off("content");
        client.off("end");
        client.off("error");
        client.off("function");
      };
      // Append each streamed chunk and publish a fresh copy to state.
      client.on("content", (content) => {
        assistantMessage.content += content;
        setMessages([...messages2, __spreadValues({}, assistantMessage)]);
      });
      client.on("end", () => {
        setIsLoading(false);
        cleanup();
        resolve(__spreadValues({}, assistantMessage));
      });
      client.on("error", (error) => {
        setIsLoading(false);
        cleanup();
        reject(error);
      });
      // A function call ends the turn: record it on the message, stop
      // loading, and resolve with the same copy semantics as "end".
      client.on("function", (functionCall) => __async(this, null, function* () {
        assistantMessage.function_call = {
          name: functionCall.name,
          arguments: JSON.stringify(functionCall.arguments)
        };
        setMessages([...messages2, __spreadValues({}, assistantMessage)]);
        setIsLoading(false);
        cleanup();
        resolve(__spreadValues({}, assistantMessage));
      }));
      client.fetch({
        messages: messagesWithContext,
        functions: options.functions,
        headers: options.headers,
        copilotConfig: options.copilotConfig
      });
    });
  });
  // Runs a completion and, when the reply carries a function call and a
  // handler was supplied, forwards it to options.onFunctionCall.
  const runChatCompletionAndHandleFunctionCall = (messages2) => __async(this, null, function* () {
    const message = yield runChatCompletion(messages2);
    if (message.function_call && options.onFunctionCall) {
      yield options.onFunctionCall(messages2, message.function_call);
    }
  });
  // Appends a message and kicks off a completion. No-op while a request is
  // in flight. NOTE(review): reads `messages` from the closure, so a call
  // racing a pending state update may operate on a stale list.
  const append = (message) => __async(this, null, function* () {
    if (isLoading) {
      return;
    }
    const newMessages = [...messages, message];
    setMessages(newMessages);
    return runChatCompletionAndHandleFunctionCall(newMessages);
  });
  // Regenerates the last reply: drops a trailing assistant message (if
  // any) and re-runs the completion over the remaining transcript.
  const reload = () => __async(this, null, function* () {
    if (isLoading || messages.length === 0) {
      return;
    }
    let newMessages = [...messages];
    const lastMessage = messages[messages.length - 1];
    if (lastMessage.role === "assistant") {
      newMessages = newMessages.slice(0, -1);
    }
    setMessages(newMessages);
    return runChatCompletionAndHandleFunctionCall(newMessages);
  });
  // Aborting an in-flight request is not supported yet.
  const stop = () => {
    throw new Error("Not implemented");
  };
  return {
    messages,
    append,
    reload,
    stop,
    isLoading,
    input,
    setInput
  };
}
+ export {
459
+ useChat
460
+ };
461
+ //# sourceMappingURL=use-chat.mjs.map