@gram-ai/elements 1.0.6 → 1.0.7

package/dist/index.d.ts CHANGED
@@ -1,4 +1,6 @@
  export { ElementsProvider as GramElementsProvider } from './contexts/ElementsProvider';
  export { useElements as useGramElements } from './contexts/ElementsProvider';
- export { Chat as GramChat } from './components/Chat';
+ export { Chat } from './components/Chat';
+ export { createFetchAdapter } from './lib/fetchAdapter';
+ export type { FetchAdapterOptions } from './lib/fetchAdapter';
  export type { ElementsConfig, ComposerConfig, ModalConfig, ToolsConfig, ModelConfig, WelcomeConfig, Suggestion, GramRuntimeApi, CustomRuntimeApi, } from './types';
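For consumers of the package root, the net effect of this hunk is that the chat component is no longer aliased as `GramChat`, and the new fetch adapter ships alongside it. A hedged before/after import sketch (module specifier taken from the package's `name` field):

```ts
// 1.0.6: the component was re-exported under an alias.
import { GramChat } from '@gram-ai/elements'

// 1.0.7: it is exported as `Chat`, plus the new adapter and its options type.
import { Chat, createFetchAdapter } from '@gram-ai/elements'
import type { FetchAdapterOptions } from '@gram-ai/elements'
```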
package/dist/lib/chatHandler.d.ts ADDED
@@ -0,0 +1,33 @@
+ export interface ChatHandlerOptions {
+     /**
+      * The API key for the OpenAI-compatible provider.
+      * Defaults to OPENAI_API_KEY environment variable.
+      */
+     apiKey?: string;
+     /**
+      * The base URL for the OpenAI-compatible API.
+      * Use this for OpenRouter, Azure, or other compatible providers.
+      * @example 'https://openrouter.ai/api/v1'
+      */
+     baseURL?: string;
+     /**
+      * The model to use for chat completions.
+      * @default 'gpt-4o-mini'
+      */
+     model?: string;
+ }
+ /**
+  * Creates a chat handler for Next.js API routes.
+  * Returns OpenAI-compatible SSE streaming format for use with assistant-ui.
+  *
+  * @example
+  * // app/api/chat/route.ts
+  * import { createChatHandler } from '@gram-ai/elements/server'
+  *
+  * export const POST = createChatHandler({
+  *   apiKey: process.env.OPENROUTER_API_KEY,
+  *   baseURL: 'https://openrouter.ai/api/v1',
+  *   model: 'anthropic/claude-3.5-sonnet',
+  * })
+  */
+ export declare function createChatHandler(options?: ChatHandlerOptions): (req: Request) => Promise<Response>;
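The JSDoc example above covers OpenRouter; going by the documented defaults, a zero-argument call should also work against OpenAI itself. A minimal sketch, assuming a Next.js App Router project with `OPENAI_API_KEY` set:

```ts
// app/api/chat/route.ts
// Relies on the documented defaults: apiKey falls back to process.env.OPENAI_API_KEY,
// and the model to 'gpt-4o-mini'.
import { createChatHandler } from '@gram-ai/elements/server'

export const POST = createChatHandler()
```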
package/dist/lib/fetchAdapter.d.ts ADDED
@@ -0,0 +1,18 @@
+ import { ChatModelAdapter } from '@assistant-ui/react';
+ export interface FetchAdapterOptions {
+     /**
+      * The API endpoint to send chat requests to.
+      * @default '/api/chat'
+      */
+     endpoint?: string;
+ }
+ /**
+  * Creates a chat adapter that connects to an OpenAI-compatible API endpoint.
+  * Handles the SSE streaming format used by OpenAI and compatible providers.
+  *
+  * @example
+  * import { createFetchAdapter } from '@gram-ai/elements'
+  *
+  * const adapter = createFetchAdapter({ endpoint: '/api/chat' })
+  */
+ export declare function createFetchAdapter(options?: FetchAdapterOptions): ChatModelAdapter;
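Because `createFetchAdapter` returns an assistant-ui `ChatModelAdapter`, it should slot into a client-side runtime. A sketch assuming `useLocalRuntime` and `AssistantRuntimeProvider` from `@assistant-ui/react`; the wrapper component is illustrative:

```tsx
import type { ReactNode } from 'react'
import { AssistantRuntimeProvider, useLocalRuntime } from '@assistant-ui/react'
import { createFetchAdapter } from '@gram-ai/elements'

// Streams from the route created by createChatHandler on the server.
const adapter = createFetchAdapter({ endpoint: '/api/chat' })

export function ChatRuntime({ children }: { children: ReactNode }) {
  const runtime = useLocalRuntime(adapter)
  return <AssistantRuntimeProvider runtime={runtime}>{children}</AssistantRuntimeProvider>
}
```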
package/dist/server.d.ts ADDED
@@ -0,0 +1,2 @@
+ export { createChatHandler } from './lib/chatHandler';
+ export type { ChatHandlerOptions } from './lib/chatHandler';
package/dist/server.js ADDED
@@ -0,0 +1,66 @@
+ import y from "openai";
+ function l(o) {
+   return o.map((e) => {
+     let t;
+     return typeof e.content == "string" ? t = e.content : Array.isArray(e.content) ? t = e.content.filter((n) => n.type === "text" && n.text).map((n) => n.text).join("") : t = "", {
+       role: e.role,
+       content: t
+     };
+   });
+ }
+ function w(o = {}) {
+   const {
+     apiKey: e = process.env.OPENAI_API_KEY,
+     baseURL: t,
+     model: n = "gpt-4o-mini"
+   } = o, i = new y({
+     apiKey: e,
+     baseURL: t
+   });
+   return async function(p) {
+     try {
+       const { messages: r } = await p.json(), d = await i.chat.completions.create({
+         model: n,
+         messages: l(r),
+         stream: !0
+       }), c = new TextEncoder(), u = new ReadableStream({
+         async start(a) {
+           try {
+             for await (const s of d) {
+               const f = JSON.stringify(s);
+               a.enqueue(c.encode(`data: ${f}
+
+ `));
+             }
+             a.enqueue(c.encode(`data: [DONE]
+
+ `)), a.close();
+           } catch (s) {
+             a.error(s);
+           }
+         }
+       });
+       return new Response(u, {
+         headers: {
+           "Content-Type": "text/event-stream",
+           "Cache-Control": "no-cache",
+           Connection: "keep-alive"
+         }
+       });
+     } catch (r) {
+       return new Response(
+         JSON.stringify({
+           error: String(r),
+           stack: r instanceof Error ? r.stack : void 0
+         }),
+         {
+           status: 500,
+           headers: { "Content-Type": "application/json" }
+         }
+       );
+     }
+   };
+ }
+ export {
+   w as createChatHandler
+ };
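The shipped `dist/server.js` is build output with mangled identifiers. Read alongside the declaration file above, it appears to boil down to the following readable sketch (not the published source; names other than `createChatHandler` are mine):

```ts
import OpenAI from 'openai'

// Flatten assistant-ui style message content (string or array of text parts) to plain text.
function toOpenAIMessages(messages: any[]) {
  return messages.map((message) => ({
    role: message.role,
    content:
      typeof message.content === 'string'
        ? message.content
        : Array.isArray(message.content)
          ? message.content
              .filter((part: any) => part.type === 'text' && part.text)
              .map((part: any) => part.text)
              .join('')
          : '',
  }))
}

export function createChatHandler(
  options: { apiKey?: string; baseURL?: string; model?: string } = {},
) {
  const {
    apiKey = process.env.OPENAI_API_KEY,
    baseURL,
    model = 'gpt-4o-mini',
  } = options
  const client = new OpenAI({ apiKey, baseURL })

  // The returned function is the Request -> Response handler used as a route export.
  return async (req: Request): Promise<Response> => {
    try {
      const { messages } = await req.json()
      const completion = await client.chat.completions.create({
        model,
        messages: toOpenAIMessages(messages),
        stream: true,
      })

      const encoder = new TextEncoder()
      // Re-emit every streamed chunk in OpenAI's SSE wire format, then the [DONE] sentinel.
      const stream = new ReadableStream({
        async start(controller) {
          try {
            for await (const chunk of completion) {
              controller.enqueue(encoder.encode(`data: ${JSON.stringify(chunk)}\n\n`))
            }
            controller.enqueue(encoder.encode('data: [DONE]\n\n'))
            controller.close()
          } catch (err) {
            controller.error(err)
          }
        },
      })

      return new Response(stream, {
        headers: {
          'Content-Type': 'text/event-stream',
          'Cache-Control': 'no-cache',
          Connection: 'keep-alive',
        },
      })
    } catch (err) {
      // Errors surface as a 500 JSON payload including the stack when available.
      return new Response(
        JSON.stringify({
          error: String(err),
          stack: err instanceof Error ? err.stack : undefined,
        }),
        { status: 500, headers: { 'Content-Type': 'application/json' } },
      )
    }
  }
}
```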
package/dist/types.d.ts CHANGED
@@ -19,7 +19,7 @@ export interface ElementsConfig {
      /**
       * Whether to render the chat window inside of an expandable modal or a standalone chat window.
       */
-     variant?: 'modal' | 'standalone';
+     variant?: 'widget' | 'standalone' | 'sidecar';
      /**
       * LLM model configuration.
       *
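Note that `'modal'` is removed rather than kept as an alias, so a 1.0.6 config using `variant: 'modal'` will no longer type-check. A hedged sketch of the updated setting (other `ElementsConfig` fields omitted, hence the `Partial`):

```ts
import type { ElementsConfig } from '@gram-ai/elements'

// 'modal' (1.0.6) is gone; 'widget' and 'sidecar' are new alongside the unchanged 'standalone'.
const config: Partial<ElementsConfig> = {
  variant: 'widget',
}
```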
package/package.json CHANGED
@@ -2,13 +2,17 @@
    "name": "@gram-ai/elements",
    "description": "Gram Elements is a library of UI primitives for building chat-like experiences for MCP Servers.",
    "type": "module",
-   "version": "1.0.6",
+   "version": "1.0.7",
    "main": "dist/index.js",
    "exports": {
      ".": {
        "import": "./dist/elements.js",
        "types": "./dist/index.d.ts"
      },
+     "./server": {
+       "import": "./dist/server.js",
+       "types": "./dist/server.d.ts"
+     },
      "./elements.css": "./dist/elements.css"
    },
    "files": [
@@ -23,7 +27,7 @@
    },
    "scripts": {
      "build": "vite build",
-     "lint": "eslint .",
+     "lint": "eslint src",
      "analyze": "pnpm dlx vite-bundle-visualizer",
      "storybook": "storybook dev -p 6006",
      "build-storybook": "storybook build",
@@ -55,7 +59,7 @@
      "tailwind-merge": "^3.3.1"
    },
    "devDependencies": {
-     "@ai-sdk/openai": "2.0.0-beta.5",
+     "@ai-sdk/openai": "^2.0.0-beta.5",
      "@assistant-ui/react": "^0.11.37",
      "@assistant-ui/react-markdown": "^0.11.4",
      "@eslint/compat": "^2.0.0",
@@ -75,6 +79,7 @@
      "eslint-plugin-storybook": "^10.1.4",
      "eslint-plugin-unused-imports": "^4.3.0",
      "motion": "^12.23.14",
+     "openai": "^6.9.1",
      "prettier": "^3.7.4",
      "prettier-plugin-tailwindcss": "^0.7.2",
      "remark-gfm": "^4.0.1",