@nqminds/mcp-client 1.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,188 @@
1
+ # @flair/mcp-client
2
+
3
+ A complete, ready-to-use React component and backend client for MCP (Model Context Protocol) AI chat with OpenAI integration. Includes both the UI component and the OpenAI-powered backend logic.
4
+
5
+ ## Features
6
+
7
+ - 🎨 **Complete UI Component** - Ready-to-use chat interface with streaming support
8
+ - 🤖 **OpenAI Integration** - Built-in OpenAI client with conversation management
9
+ - 🔧 **API Helpers** - Simple Next.js API route helpers for instant setup
10
+ - 🎨 **Customizable Styling** - CSS variables for easy theming
11
+ - 📦 **All-in-One Package** - No need to configure MCP clients separately
12
+
13
+ ## Installation
14
+
15
+ ```bash
16
+ npm install @flair/mcp-client
17
+ ```
18
+
19
+ ## Quick Start
20
+
21
+ ### 1. Set up environment variables
22
+
23
+ Create a `.env.local` file:
24
+
25
+ ```env
26
+ OPENAI_API_KEY=your_openai_api_key
27
+ MCP_SERVER_COMMAND="node /path/to/your/mcp-server/build/index.js"
28
+ OPENAI_MODEL=chatgpt-5-mini
29
+ ```
30
+
31
+ ### 2. Create API route
32
+
33
+ Create `app/api/mcp/chat/route.ts`:
34
+
35
+ ```typescript
36
+ import { createMCPChatHandler, createMCPClearHandler } from "@flair/mcp-client/server";
37
+
38
+ const chatHandler = createMCPChatHandler({
39
+ openaiApiKey: process.env.OPENAI_API_KEY!,
40
+ mcpServerCommand: process.env.MCP_SERVER_COMMAND!,
41
+ openaiModel: process.env.OPENAI_MODEL,
42
+ });
43
+
44
+ const clearHandler = createMCPClearHandler();
45
+
46
+ export async function POST(req: Request) {
47
+ return chatHandler(req);
48
+ }
49
+
50
+ export async function DELETE(req: Request) {
51
+ return clearHandler(req);
52
+ }
53
+ ```
54
+
55
+ ### 3. Add the component to your app
56
+
57
+ ```tsx
58
+ import { MCPChat } from '@flair/mcp-client';
59
+ import '@flair/mcp-client/dist/styles/MCPChat.css';
60
+
61
+ export default function Page() {
62
+ return (
63
+ <div>
64
+ <h1>My App</h1>
65
+ <MCPChat companyNumber="12345678" />
66
+ </div>
67
+ );
68
+ }
69
+ ```
70
+
71
+ ### 4. Import CSS in your layout
72
+
73
+ ```tsx
74
+ import '@flair/mcp-client/dist/styles/MCPChat.css';
75
+ ```
76
+
77
+ That's it! Your MCP chat is ready to use.
78
+
79
+ ## Component Props
80
+
81
+ ```typescript
82
+ interface MCPChatProps {
83
+ companyNumber?: string; // Optional context
84
+ apiEndpoint?: string; // Default: "/api/mcp/chat"
85
+ customStyles?: React.CSSProperties; // CSS variable overrides
86
+ className?: string; // Additional CSS class
87
+ }
88
+ ```
89
+
90
+ ## Custom Styling
91
+
92
+ Override CSS variables:
93
+
94
+ ```tsx
95
+ const customStyles = {
96
+ '--mcp-primary-color': '#7c5cff',
97
+ '--mcp-border-radius': '24px',
98
+ } as React.CSSProperties;
99
+
100
+ <MCPChat customStyles={customStyles} />
101
+ ```
102
+
103
+ Available CSS variables:
104
+ - `--mcp-primary-color`
105
+ - `--mcp-bg`
106
+ - `--mcp-card-bg`
107
+ - `--mcp-text`
108
+ - `--mcp-text-secondary`
109
+ - `--mcp-border`
110
+ - `--mcp-border-radius`
111
+ - `--mcp-spacing`
112
+
113
+ ## Advanced Usage
114
+
115
+ ### Direct Client Usage
116
+
117
+ If you need more control, use the client directly (server-side only):
118
+
119
+ ```typescript
120
+ import { MCPClientOpenAI } from '@flair/mcp-client/server';
121
+
122
+ const client = new MCPClientOpenAI({
123
+ openaiApiKey: process.env.OPENAI_API_KEY!,
124
+ mcpServerCommand: process.env.MCP_SERVER_COMMAND!,
125
+ openaiModel: "chatgpt-5-mini",
126
+ });
127
+
128
+ await client.connect();
129
+ const response = await client.processQuery("Hello!", (thinking) => {
130
+ console.log(thinking);
131
+ });
132
+ console.log(response);
133
+ await client.cleanup();
134
+ ```
135
+
136
+ ### Custom API Implementation
137
+
138
+ Create your own streaming handler (server-side):
139
+
140
+ ```typescript
141
+ import { MCPClientOpenAI } from '@flair/mcp-client/server';
142
+
143
+ export async function POST(req: Request) {
144
+ const { message } = await req.json();
145
+
146
+ const client = new MCPClientOpenAI({
147
+ openaiApiKey: process.env.OPENAI_API_KEY!,
148
+ mcpServerCommand: process.env.MCP_SERVER_COMMAND!,
149
+ });
150
+
151
+ await client.connect();
152
+
153
+ const response = await client.processQuery(message, (thinking) => {
154
+ // Handle thinking steps
155
+ });
156
+
157
+ await client.cleanup();
158
+
159
+ return Response.json({ response });
160
+ }
161
+ ```
162
+
163
+ ## API Reference
164
+
165
+ See [EXAMPLES.md](EXAMPLES.md) for detailed examples.
166
+
167
+ ## Development
168
+
169
+ ### Building the Package
170
+
171
+ ```bash
172
+ # Build with automatic patch version bump (1.0.0 → 1.0.1)
173
+ npm run build
174
+
175
+ # Build without version bump (development)
176
+ npm run build:no-version
177
+
178
+ # Bump minor version (1.0.0 → 1.1.0) for new features
179
+ npm run version:minor
180
+
181
+ # Bump major version (1.0.0 → 2.0.0) for breaking changes
182
+ npm run version:major
183
+
184
+ # Create release tarball
185
+ npm run release
186
+ ```
187
+
188
+ **Versioning:** Every `npm run build` automatically increments the patch version.
@@ -0,0 +1,4 @@
1
+ import React from "react";
2
+ import type { MCPChatProps } from "./types";
3
+ export declare function MCPChat({ companyNumber, apiEndpoint, customStyles, className }: MCPChatProps): React.JSX.Element;
4
+ //# sourceMappingURL=MCPChat.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"MCPChat.d.ts","sourceRoot":"","sources":["../src/MCPChat.tsx"],"names":[],"mappings":"AAEA,OAAO,KAAsC,MAAM,OAAO,CAAC;AAE3D,OAAO,KAAK,EAAyB,YAAY,EAAe,MAAM,SAAS,CAAC;AAEhF,wBAAgB,OAAO,CAAC,EACtB,aAAa,EACb,WAA6B,EAC7B,YAAiB,EACjB,SAAc,EACf,EAAE,YAAY,qBAgTd"}
@@ -0,0 +1,207 @@
1
+ "use client";
2
+ import React, { useState, useRef, useEffect } from "react";
3
+ import ReactMarkdown from "react-markdown";
4
+ export function MCPChat({ companyNumber, apiEndpoint = "/api/mcp/chat", customStyles = {}, className = "" }) {
5
+ const [messages, setMessages] = useState([]);
6
+ const [input, setInput] = useState("");
7
+ const [isLoading, setIsLoading] = useState(false);
8
+ const [thinkingSteps, setThinkingSteps] = useState([]);
9
+ const [isExpanded, setIsExpanded] = useState(false);
10
+ const messagesEndRef = useRef(null);
11
+ const thinkingEndRef = useRef(null);
12
+ // Merge custom styles with default CSS variables
13
+ const containerStyle = {
14
+ ...customStyles,
15
+ };
16
+ const scrollToBottom = () => {
17
+ messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
18
+ };
19
+ useEffect(() => {
20
+ scrollToBottom();
21
+ }, [messages]);
22
+ const handleSubmit = async (e) => {
23
+ e.preventDefault();
24
+ if (!input.trim() || isLoading)
25
+ return;
26
+ const userMessage = {
27
+ role: "user",
28
+ content: input.trim(),
29
+ timestamp: new Date(),
30
+ };
31
+ setMessages((prev) => [...prev, userMessage]);
32
+ setInput("");
33
+ setIsLoading(true);
34
+ setThinkingSteps([]);
35
+ // Add initial thinking step
36
+ let thinkingStepCounter = 0;
37
+ const addThinkingStep = (message) => {
38
+ setThinkingSteps((prev) => [
39
+ ...prev,
40
+ { id: `${Date.now()}-${thinkingStepCounter++}`, message, timestamp: new Date() },
41
+ ]);
42
+ // Auto-scroll thinking window
43
+ setTimeout(() => {
44
+ thinkingEndRef.current?.scrollIntoView({ behavior: "smooth" });
45
+ }, 50);
46
+ };
47
+ addThinkingStep("🤔 Analyzing your question...");
48
+ try {
49
+ // Call your API route that communicates with MCP
50
+ const response = await fetch(apiEndpoint, {
51
+ method: "POST",
52
+ headers: { "Content-Type": "application/json" },
53
+ body: JSON.stringify({
54
+ message: userMessage.content,
55
+ context: companyNumber ? { company_number: companyNumber } : undefined,
56
+ }),
57
+ });
58
+ if (!response.ok) {
59
+ throw new Error("Failed to get response");
60
+ }
61
+ const reader = response.body?.getReader();
62
+ const decoder = new TextDecoder();
63
+ if (!reader) {
64
+ throw new Error("No response body");
65
+ }
66
+ // Create a streaming message
67
+ const streamingMessage = {
68
+ role: "assistant",
69
+ content: "",
70
+ timestamp: new Date(),
71
+ isStreaming: true,
72
+ };
73
+ setMessages((prev) => [...prev, streamingMessage]);
74
+ let buffer = "";
75
+ let done = false;
76
+ while (!done) {
77
+ const { value, done: readerDone } = await reader.read();
78
+ done = readerDone;
79
+ if (value) {
80
+ buffer += decoder.decode(value, { stream: true });
81
+ const lines = buffer.split("\n");
82
+ buffer = lines.pop() || "";
83
+ for (const line of lines) {
84
+ if (line.startsWith("data: ")) {
85
+ const data = line.slice(6);
86
+ if (data === "[DONE]") {
87
+ continue;
88
+ }
89
+ try {
90
+ const parsed = JSON.parse(data);
91
+ if (parsed.type === "thinking") {
92
+ addThinkingStep(parsed.message || "Processing...");
93
+ }
94
+ else if (parsed.type === "content") {
95
+ // Update the streaming message with new content
96
+ setMessages((prev) => {
97
+ const updated = [...prev];
98
+ const lastIndex = updated.length - 1;
99
+ if (lastIndex >= 0 && updated[lastIndex].isStreaming) {
100
+ // Create a new message object instead of mutating
101
+ updated[lastIndex] = {
102
+ ...updated[lastIndex],
103
+ content: updated[lastIndex].content + (parsed.chunk || ""),
104
+ };
105
+ }
106
+ return updated;
107
+ });
108
+ }
109
+ else if (parsed.type === "done") {
110
+ // Mark streaming as complete
111
+ setMessages((prev) => {
112
+ const updated = [...prev];
113
+ const lastIndex = updated.length - 1;
114
+ if (lastIndex >= 0 && updated[lastIndex].isStreaming) {
115
+ // Create a new message object instead of mutating
116
+ updated[lastIndex] = {
117
+ ...updated[lastIndex],
118
+ isStreaming: false,
119
+ };
120
+ }
121
+ return updated;
122
+ });
123
+ }
124
+ else if (parsed.type === "error") {
125
+ // Handle error from stream
126
+ throw new Error(parsed.message || "Stream error");
127
+ }
128
+ }
129
+ catch (e) {
130
+ console.error("Error parsing SSE data:", e);
131
+ }
132
+ }
133
+ }
134
+ }
135
+ }
136
+ }
137
+ catch (error) {
138
+ console.error("Error:", error);
139
+ const errorMessage = {
140
+ role: "assistant",
141
+ content: "Sorry, I encountered an error. Please try again.",
142
+ timestamp: new Date(),
143
+ };
144
+ setMessages((prev) => {
145
+ // Remove streaming message if it exists
146
+ const filtered = prev.filter((m) => !m.isStreaming);
147
+ return [...filtered, errorMessage];
148
+ });
149
+ }
150
+ finally {
151
+ setIsLoading(false);
152
+ // Clear thinking steps after a brief delay
153
+ setTimeout(() => setThinkingSteps([]), 2000);
154
+ }
155
+ };
156
+ const clearChat = async () => {
157
+ setMessages([]);
158
+ // Also clear server-side conversation history
159
+ try {
160
+ await fetch(apiEndpoint, {
161
+ method: "DELETE",
162
+ headers: { "Content-Type": "application/json" },
163
+ body: JSON.stringify({ sessionId: "default" }),
164
+ });
165
+ }
166
+ catch (error) {
167
+ console.error("Failed to clear server conversation:", error);
168
+ }
169
+ };
170
+ if (!isExpanded) {
171
+ return (React.createElement("div", { className: `mcp-chat-trigger ${className}`, style: containerStyle },
172
+ React.createElement("button", { onClick: () => setIsExpanded(true), className: "mcp-chat-trigger-button" }, "\uD83D\uDCAC Ask AI Assistant")));
173
+ }
174
+ return (React.createElement("div", { className: `mcp-chat-container ${className}`, style: containerStyle },
175
+ React.createElement("div", { className: "mcp-chat-header" },
176
+ React.createElement("h3", { className: "mcp-chat-title" }, "AI Assistant"),
177
+ React.createElement("div", { className: "mcp-chat-header-actions" },
178
+ messages.length > 0 && (React.createElement("button", { onClick: clearChat, className: "mcp-chat-button mcp-chat-button-secondary" }, "Clear")),
179
+ React.createElement("button", { onClick: () => setIsExpanded(false), className: "mcp-chat-button mcp-chat-button-secondary" }, "\u2715"))),
180
+ React.createElement("div", { className: "mcp-chat-messages" },
181
+ messages.length === 0 && (React.createElement("div", { className: "mcp-chat-welcome" },
182
+ React.createElement("p", { className: "mcp-chat-welcome-title" }, "\uD83D\uDC4B Hi! I can help you with company information."),
183
+ companyNumber && (React.createElement("p", { className: "mcp-chat-welcome-subtitle" },
184
+ "Currently viewing: ",
185
+ React.createElement("strong", null, companyNumber))),
186
+ React.createElement("p", { className: "mcp-chat-welcome-subtitle" }, "Try asking:"),
187
+ React.createElement("ul", { className: "mcp-chat-suggestions" },
188
+ React.createElement("li", null, "\u201CSay hello\u201D"),
189
+ companyNumber && (React.createElement(React.Fragment, null,
190
+ React.createElement("li", null, "\u201CGet company info\u201D"),
191
+ React.createElement("li", null, "\u201CShow me financial data from 2020\u201D")))))),
192
+ messages.map((msg, idx) => (React.createElement("div", { key: idx, className: `mcp-chat-message ${msg.role === "user" ? "mcp-chat-message-user" : "mcp-chat-message-assistant"}` },
193
+ React.createElement("div", { className: "mcp-chat-message-bubble" },
194
+ msg.role === "assistant" ? (React.createElement("div", { className: "mcp-chat-message-content markdown-content" },
195
+ React.createElement(ReactMarkdown, null, msg.content))) : (React.createElement("div", { className: "mcp-chat-message-content" }, msg.content)),
196
+ React.createElement("div", { className: "mcp-chat-message-timestamp" }, msg.timestamp.toLocaleTimeString()))))),
197
+ isLoading && thinkingSteps.length > 0 && (React.createElement("div", { className: "mcp-chat-message mcp-chat-message-assistant" },
198
+ React.createElement("div", { className: "mcp-chat-thinking" },
199
+ React.createElement("div", { className: "mcp-chat-thinking-title" }, "\uD83D\uDCAD Processing..."),
200
+ React.createElement("div", { className: "mcp-chat-thinking-steps" },
201
+ thinkingSteps.map((step) => (React.createElement("div", { key: step.id, className: "mcp-chat-thinking-step" }, step.message))),
202
+ React.createElement("div", { ref: thinkingEndRef }))))),
203
+ React.createElement("div", { ref: messagesEndRef })),
204
+ React.createElement("form", { onSubmit: handleSubmit, className: "mcp-chat-input-form" },
205
+ React.createElement("input", { type: "text", value: input, onChange: (e) => setInput(e.target.value), placeholder: "Ask a question...", className: "mcp-chat-input", disabled: isLoading }),
206
+ React.createElement("button", { type: "submit", className: "mcp-chat-button mcp-chat-button-primary", disabled: isLoading || !input.trim() }, "Send"))));
207
+ }
@@ -0,0 +1,21 @@
1
+ /**
2
+ * Helper utilities for creating MCP API routes in Next.js
3
+ */
4
+ export interface CreateMCPHandlerConfig {
5
+ openaiApiKey: string;
6
+ mcpServerCommand: string;
7
+ openaiModel?: string;
8
+ }
9
+ /**
10
+ * Creates a streaming MCP chat handler for Next.js API routes
11
+ */
12
+ export declare function createMCPChatHandler(config: CreateMCPHandlerConfig): (request: Request) => Promise<Response>;
13
+ /**
14
+ * Creates a handler for clearing conversation history
15
+ */
16
+ export declare function createMCPClearHandler(): (request: Request) => Promise<Response>;
17
+ /**
18
+ * Cleanup function to close all MCP clients
19
+ */
20
+ export declare function cleanupMCPClients(): Promise<void>;
21
+ //# sourceMappingURL=api-helpers.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"api-helpers.d.ts","sourceRoot":"","sources":["../src/api-helpers.ts"],"names":[],"mappings":"AAAA;;GAEG;AAOH,MAAM,WAAW,sBAAsB;IACrC,YAAY,EAAE,MAAM,CAAC;IACrB,gBAAgB,EAAE,MAAM,CAAC;IACzB,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAED;;GAEG;AACH,wBAAgB,oBAAoB,CAAC,MAAM,EAAE,sBAAsB,IACnD,SAAS,OAAO,uBAiE/B;AAED;;GAEG;AACH,wBAAgB,qBAAqB,KACrB,SAAS,OAAO,uBAU/B;AAED;;GAEG;AACH,wBAAsB,iBAAiB,kBAKtC"}
@@ -0,0 +1,87 @@
1
+ /**
2
+ * Helper utilities for creating MCP API routes in Next.js
3
+ */
4
+ import { MCPClientOpenAI } from "./openai-client";
5
+ // Global client instance (singleton per session)
6
+ const clients = new Map();
7
+ /**
8
+ * Creates a streaming MCP chat handler for Next.js API routes
9
+ */
10
+ export function createMCPChatHandler(config) {
11
+ return async (request) => {
12
+ const { message, context, sessionId = "default" } = await request.json();
13
+ // Get or create client for this session
14
+ let client = clients.get(sessionId);
15
+ if (!client) {
16
+ client = new MCPClientOpenAI({
17
+ openaiApiKey: config.openaiApiKey,
18
+ mcpServerCommand: config.mcpServerCommand,
19
+ openaiModel: config.openaiModel,
20
+ });
21
+ await client.connect();
22
+ clients.set(sessionId, client);
23
+ }
24
+ // Create a streaming response
25
+ const stream = new ReadableStream({
26
+ async start(controller) {
27
+ const encoder = new TextEncoder();
28
+ const sendEvent = (type, data) => {
29
+ controller.enqueue(encoder.encode(`data: ${JSON.stringify({ type, ...data })}\n\n`));
30
+ };
31
+ try {
32
+ sendEvent("thinking", { message: "🤔 Analyzing your question..." });
33
+ // Process the query with thinking callback
34
+ const response = await client.processQuery(context ? `${message}\nContext: ${JSON.stringify(context)}` : message, (thinkingMessage) => {
35
+ sendEvent("thinking", { message: thinkingMessage });
36
+ });
37
+ // Stream the response in chunks
38
+ const chunkSize = 10;
39
+ for (let i = 0; i < response.length; i += chunkSize) {
40
+ const chunk = response.slice(i, i + chunkSize);
41
+ sendEvent("content", { chunk });
42
+ // Small delay for better streaming effect
43
+ await new Promise((resolve) => setTimeout(resolve, 20));
44
+ }
45
+ sendEvent("done", {});
46
+ }
47
+ catch (error) {
48
+ sendEvent("error", {
49
+ message: error instanceof Error ? error.message : "An error occurred",
50
+ });
51
+ }
52
+ finally {
53
+ controller.close();
54
+ }
55
+ },
56
+ });
57
+ return new Response(stream, {
58
+ headers: {
59
+ "Content-Type": "text/event-stream",
60
+ "Cache-Control": "no-cache",
61
+ Connection: "keep-alive",
62
+ },
63
+ });
64
+ };
65
+ }
66
+ /**
67
+ * Creates a handler for clearing conversation history
68
+ */
69
+ export function createMCPClearHandler() {
70
+ return async (request) => {
71
+ const { sessionId = "default" } = await request.json();
72
+ const client = clients.get(sessionId);
73
+ if (client) {
74
+ client.clearHistory();
75
+ }
76
+ return Response.json({ success: true });
77
+ };
78
+ }
79
+ /**
80
+ * Cleanup function to close all MCP clients
81
+ */
82
+ export async function cleanupMCPClients() {
83
+ for (const [sessionId, client] of clients.entries()) {
84
+ await client.cleanup();
85
+ clients.delete(sessionId);
86
+ }
87
+ }
@@ -0,0 +1,3 @@
1
+ export { MCPChat } from "./MCPChat";
2
+ export type { MCPChatProps, Message, ThinkingStep, StreamEvent } from "./types";
3
+ //# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,OAAO,EAAE,MAAM,WAAW,CAAC;AACpC,YAAY,EAAE,YAAY,EAAE,OAAO,EAAE,YAAY,EAAE,WAAW,EAAE,MAAM,SAAS,CAAC"}
package/dist/index.js ADDED
@@ -0,0 +1,2 @@
1
+ // Client-side exports only (safe for browser)
2
+ export { MCPChat } from "./MCPChat";
@@ -0,0 +1,26 @@
1
+ /**
2
+ * OpenAI-powered MCP Client
3
+ * Uses ChatGPT to intelligently interact with MCP tools
4
+ */
5
+ export interface MCPClientConfig {
6
+ openaiApiKey: string;
7
+ mcpServerCommand: string;
8
+ openaiModel?: string;
9
+ clientName?: string;
10
+ clientVersion?: string;
11
+ }
12
+ export declare class MCPClientOpenAI {
13
+ private client;
14
+ private openai;
15
+ private transport;
16
+ private conversationHistory;
17
+ private lastCompaction;
18
+ private config;
19
+ constructor(config: MCPClientConfig);
20
+ private compactConversation;
21
+ connect(): Promise<void>;
22
+ processQuery(query: string, onThinking?: (message: string) => void): Promise<string>;
23
+ clearHistory(): void;
24
+ cleanup(): Promise<void>;
25
+ }
26
+ //# sourceMappingURL=openai-client.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"openai-client.d.ts","sourceRoot":"","sources":["../src/openai-client.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAMH,MAAM,WAAW,eAAe;IAC9B,YAAY,EAAE,MAAM,CAAC;IACrB,gBAAgB,EAAE,MAAM,CAAC;IACzB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,aAAa,CAAC,EAAE,MAAM,CAAC;CACxB;AAED,qBAAa,eAAe;IAC1B,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,SAAS,CAAuB;IACxC,OAAO,CAAC,mBAAmB,CAAsB;IACjD,OAAO,CAAC,cAAc,CAAa;IACnC,OAAO,CAAC,MAAM,CAA4B;gBAE9B,MAAM,EAAE,eAAe;YAkCrB,mBAAmB;IAoB3B,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAIxB,YAAY,CAAC,KAAK,EAAE,MAAM,EAAE,UAAU,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC;IAqL1F,YAAY,IAAI,IAAI;IAId,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;CAG/B"}
@@ -0,0 +1,225 @@
1
+ /**
2
+ * OpenAI-powered MCP Client
3
+ * Uses ChatGPT to intelligently interact with MCP tools
4
+ */
5
+ import { Client } from "@modelcontextprotocol/sdk/client/index.js";
6
+ import { StdioClientTransport } from "@modelcontextprotocol/sdk/client/stdio.js";
7
+ import OpenAI from "openai";
8
+ export class MCPClientOpenAI {
9
+ constructor(config) {
10
+ this.conversationHistory = [];
11
+ this.lastCompaction = 0;
12
+ this.config = {
13
+ openaiApiKey: config.openaiApiKey,
14
+ mcpServerCommand: config.mcpServerCommand,
15
+ openaiModel: config.openaiModel || "chatgpt-5-mini",
16
+ clientName: config.clientName || "mcp-flair-client",
17
+ clientVersion: config.clientVersion || "1.0.0",
18
+ };
19
+ this.openai = new OpenAI({
20
+ apiKey: this.config.openaiApiKey,
21
+ });
22
+ // Parse the server command and args
23
+ const serverCmd = this.config.mcpServerCommand.split(" ");
24
+ const command = serverCmd[0];
25
+ const args = serverCmd.slice(1);
26
+ this.transport = new StdioClientTransport({
27
+ command: command,
28
+ args: args,
29
+ });
30
+ this.client = new Client({
31
+ name: this.config.clientName,
32
+ version: this.config.clientVersion,
33
+ }, {
34
+ capabilities: {},
35
+ });
36
+ }
37
+ async compactConversation() {
38
+ try {
39
+ const compactionResponse = await this.openai.responses.compact({
40
+ model: this.config.openaiModel,
41
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
42
+ input: this.conversationHistory,
43
+ });
44
+ this.conversationHistory = compactionResponse.output;
45
+ this.lastCompaction = Date.now();
46
+ }
47
+ catch (error) {
48
+ // Keep system message and last 25 items
49
+ if (this.conversationHistory.length > 26) {
50
+ const systemMessage = this.conversationHistory[0];
51
+ const recentItems = this.conversationHistory.slice(-25);
52
+ this.conversationHistory = [systemMessage, ...recentItems];
53
+ }
54
+ }
55
+ }
56
+ async connect() {
57
+ await this.client.connect(this.transport);
58
+ }
59
+ async processQuery(query, onThinking) {
60
+ // Check if we should compact
61
+ const shouldCompact = this.conversationHistory.length >= 40 &&
62
+ (Date.now() - this.lastCompaction > 10 * 60 * 1000);
63
+ if (shouldCompact) {
64
+ await this.compactConversation();
65
+ }
66
+ // Add user message to conversation history
67
+ this.conversationHistory.push({
68
+ type: "message",
69
+ role: "user",
70
+ content: [
71
+ {
72
+ type: "input_text",
73
+ text: query,
74
+ }
75
+ ],
76
+ });
77
+ // Get available tools from MCP server
78
+ const toolsResponse = await this.client.listTools();
79
+ // Convert MCP tools to OpenAI Responses API format
80
+ const tools = toolsResponse.tools.map((tool) => ({
81
+ type: "function",
82
+ name: tool.name,
83
+ description: tool.description || "",
84
+ parameters: tool.inputSchema,
85
+ strict: false,
86
+ }));
87
+ // Multi-turn conversation with tool calling
88
+ let loopCount = 0;
89
+ const maxLoops = 15;
90
+ let finalResponse = "";
91
+ let outOfToolCalls = false;
92
+ while (loopCount < maxLoops) {
93
+ loopCount++;
94
+ // Call OpenAI Responses API with error handling
95
+ let response;
96
+ try {
97
+ response = await this.openai.responses.create({
98
+ model: this.config.openaiModel,
99
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
100
+ input: this.conversationHistory,
101
+ tools: outOfToolCalls ? [] : tools,
102
+ });
103
+ }
104
+ catch (error) {
105
+ const err = error;
106
+ // Handle context length exceeded
107
+ if (err.status === 400 &&
108
+ (err.code === 'context_length_exceeded' ||
109
+ err.message?.includes('context') ||
110
+ err.message?.includes('length'))) {
111
+ await this.compactConversation();
112
+ response = await this.openai.responses.create({
113
+ model: this.config.openaiModel,
114
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
115
+ input: this.conversationHistory,
116
+ tools: outOfToolCalls ? [] : tools,
117
+ });
118
+ }
119
+ // Handle tool calls exhausted
120
+ else if (err.status === 400 &&
121
+ (err.code === 'response_incomplete' ||
122
+ err.message?.includes('incomplete') ||
123
+ err.message?.includes('tool'))) {
124
+ outOfToolCalls = true;
125
+ response = await this.openai.responses.create({
126
+ model: this.config.openaiModel,
127
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
128
+ input: this.conversationHistory,
129
+ tools: [],
130
+ });
131
+ }
132
+ else {
133
+ throw error;
134
+ }
135
+ }
136
+ const output = response.output;
137
+ // Find function_call items
138
+ const functionCalls = output.filter((item) => item.type === "function_call");
139
+ // Check if AI wants to call tools
140
+ if (functionCalls.length > 0) {
141
+ this.conversationHistory.push(...output);
142
+ for (const functionCall of functionCalls) {
143
+ const functionName = functionCall.name;
144
+ const functionArgs = typeof functionCall.arguments === 'string'
145
+ ? JSON.parse(functionCall.arguments)
146
+ : functionCall.arguments;
147
+ onThinking?.(`🔧 Using tool: ${functionName}`);
148
+ try {
149
+ // Execute the tool via MCP
150
+ const result = await this.client.callTool({
151
+ name: functionName,
152
+ arguments: functionArgs,
153
+ });
154
+ // Add tool result to conversation history
155
+ this.conversationHistory.push({
156
+ type: "function_call_output",
157
+ call_id: functionCall.call_id,
158
+ output: JSON.stringify(result.content),
159
+ });
160
+ }
161
+ catch (error) {
162
+ this.conversationHistory.push({
163
+ type: "function_call_output",
164
+ call_id: functionCall.call_id,
165
+ output: `Error: ${error instanceof Error ? error.message : String(error)}`,
166
+ });
167
+ }
168
+ }
169
+ continue;
170
+ }
171
+ else {
172
+ // No more tool calls, extract final response
173
+ for (const item of output) {
174
+ if (item.type === "message" && item.role === "assistant") {
175
+ for (const contentItem of item.content) {
176
+ if (contentItem.type === "output_text") {
177
+ finalResponse += contentItem.text;
178
+ }
179
+ }
180
+ }
181
+ }
182
+ this.conversationHistory.push(...output);
183
+ break;
184
+ }
185
+ }
186
+ // If we hit max loops, make one final request without tools
187
+ if (loopCount >= maxLoops && !finalResponse) {
188
+ try {
189
+ const finalApiResponse = await this.openai.responses.create({
190
+ model: this.config.openaiModel,
191
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
192
+ input: this.conversationHistory,
193
+ tools: [],
194
+ });
195
+ const finalOutput = finalApiResponse.output;
196
+ for (const item of finalOutput) {
197
+ if (item.type === "message" && item.role === "assistant") {
198
+ for (const contentItem of item.content) {
199
+ if (contentItem.type === "output_text") {
200
+ finalResponse += contentItem.text;
201
+ }
202
+ }
203
+ }
204
+ }
205
+ this.conversationHistory.push(...finalOutput);
206
+ }
207
+ catch (error) {
208
+ finalResponse = "I've gathered information but reached my reasoning limit. Please try rephrasing your question.";
209
+ }
210
+ }
211
+ // Keep conversation history manageable
212
+ if (this.conversationHistory.length > 50) {
213
+ const systemMessage = this.conversationHistory[0];
214
+ const recentItems = this.conversationHistory.slice(-49);
215
+ this.conversationHistory = [systemMessage, ...recentItems];
216
+ }
217
+ return finalResponse;
218
+ }
219
+ clearHistory() {
220
+ this.conversationHistory = [];
221
+ }
222
+ async cleanup() {
223
+ await this.client.close();
224
+ }
225
+ }
@@ -0,0 +1,3 @@
1
+ export { MCPClientOpenAI, type MCPClientConfig } from "./openai-client";
2
+ export { createMCPChatHandler, createMCPClearHandler, cleanupMCPClients } from "./api-helpers";
3
+ //# sourceMappingURL=server.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"server.d.ts","sourceRoot":"","sources":["../src/server.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,eAAe,EAAE,KAAK,eAAe,EAAE,MAAM,iBAAiB,CAAC;AACxE,OAAO,EAAE,oBAAoB,EAAE,qBAAqB,EAAE,iBAAiB,EAAE,MAAM,eAAe,CAAC"}
package/dist/server.js ADDED
@@ -0,0 +1,3 @@
1
+ // Server-side exports only (Node.js)
2
+ export { MCPClientOpenAI } from "./openai-client";
3
+ export { createMCPChatHandler, createMCPClearHandler, cleanupMCPClients } from "./api-helpers";
@@ -0,0 +1,340 @@
1
+ /* MCP Chat Component Styles */
2
+
3
+ /* CSS Variables for customization */
4
+ :root {
5
+ --mcp-primary-color: #4ea1ff;
6
+ --mcp-bg: #0b0d12;
7
+ --mcp-card-bg: #121621;
8
+ --mcp-text: #eaf0ff;
9
+ --mcp-text-secondary: #99a2b2;
10
+ --mcp-border: #1e2533;
11
+ --mcp-border-radius: 16px;
12
+ --mcp-border-radius-small: 10px;
13
+ --mcp-spacing: 24px;
14
+ --mcp-spacing-small: 12px;
15
+ --mcp-box-shadow: 0 12px 40px rgba(0, 0, 0, 0.4);
16
+ }
17
+
18
+ /* Trigger Button (collapsed state) */
19
+ .mcp-chat-trigger {
20
+ position: fixed;
21
+ bottom: 24px;
22
+ right: 24px;
23
+ z-index: 1000;
24
+ }
25
+
26
+ .mcp-chat-trigger-button {
27
+ padding: 16px 32px;
28
+ font-size: 18px;
29
+ border-radius: 28px;
30
+ box-shadow: var(--mcp-box-shadow);
31
+ font-weight: 600;
32
+ background: var(--mcp-primary-color);
33
+ color: white;
34
+ border: none;
35
+ cursor: pointer;
36
+ font-family: inherit;
37
+ transition: transform 0.2s, box-shadow 0.2s;
38
+ }
39
+
40
+ .mcp-chat-trigger-button:hover {
41
+ transform: translateY(-2px);
42
+ box-shadow: 0 8px 30px rgba(0, 0, 0, 0.5);
43
+ }
44
+
45
+ .mcp-chat-trigger-button:active {
46
+ transform: translateY(0);
47
+ }
48
+
49
+ /* Main Container (expanded state) */
50
+ .mcp-chat-container {
51
+ position: fixed;
52
+ bottom: 24px;
53
+ right: 24px;
54
+ width: 550px;
55
+ max-width: calc(100vw - 48px);
56
+ height: 700px;
57
+ max-height: calc(100vh - 48px);
58
+ background: var(--mcp-bg);
59
+ border: 1px solid var(--mcp-border);
60
+ border-radius: var(--mcp-border-radius);
61
+ box-shadow: var(--mcp-box-shadow);
62
+ display: flex;
63
+ flex-direction: column;
64
+ z-index: 1000;
65
+ }
66
+
67
+ /* Header */
68
+ .mcp-chat-header {
69
+ padding: 18px 24px;
70
+ border-bottom: 1px solid var(--mcp-border);
71
+ display: flex;
72
+ justify-content: space-between;
73
+ align-items: center;
74
+ background: var(--mcp-card-bg);
75
+ border-top-left-radius: var(--mcp-border-radius);
76
+ border-top-right-radius: var(--mcp-border-radius);
77
+ }
78
+
79
+ .mcp-chat-title {
80
+ margin: 0;
81
+ font-size: 20px;
82
+ font-weight: 600;
83
+ color: var(--mcp-text);
84
+ }
85
+
86
+ .mcp-chat-header-actions {
87
+ display: flex;
88
+ gap: 10px;
89
+ }
90
+
91
+ /* Buttons */
92
+ .mcp-chat-button {
93
+ padding: 8px 16px;
94
+ font-size: 14px;
95
+ border-radius: 8px;
96
+ border: none;
97
+ cursor: pointer;
98
+ font-family: inherit;
99
+ font-weight: 500;
100
+ transition: opacity 0.2s, background 0.2s;
101
+ }
102
+
103
+ .mcp-chat-button:disabled {
104
+ opacity: 0.5;
105
+ cursor: not-allowed;
106
+ }
107
+
108
+ .mcp-chat-button-primary {
109
+ background: var(--mcp-primary-color);
110
+ color: white;
111
+ padding: 12px 24px;
112
+ font-size: 15px;
113
+ border-radius: var(--mcp-border-radius-small);
114
+ }
115
+
116
+ .mcp-chat-button-primary:hover:not(:disabled) {
117
+ opacity: 0.9;
118
+ }
119
+
120
+ .mcp-chat-button-secondary {
121
+ background: transparent;
122
+ border: 1px solid var(--mcp-border);
123
+ color: var(--mcp-text);
124
+ }
125
+
126
+ .mcp-chat-button-secondary:hover:not(:disabled) {
127
+ background: rgba(255, 255, 255, 0.05);
128
+ }
129
+
130
+ /* Messages Area */
131
+ .mcp-chat-messages {
132
+ flex: 1;
133
+ overflow-y: auto;
134
+ padding: var(--mcp-spacing);
135
+ display: flex;
136
+ flex-direction: column;
137
+ gap: 16px;
138
+ }
139
+
140
+ /* Welcome Message */
141
+ .mcp-chat-welcome {
142
+ text-align: center;
143
+ color: var(--mcp-text-secondary);
144
+ margin-top: 60px;
145
+ }
146
+
147
+ .mcp-chat-welcome-title {
148
+ font-size: 18px;
149
+ font-weight: 500;
150
+ color: var(--mcp-text);
151
+ }
152
+
153
+ .mcp-chat-welcome-subtitle {
154
+ font-size: 16px;
155
+ margin-top: 12px;
156
+ }
157
+
158
+ .mcp-chat-suggestions {
159
+ text-align: left;
160
+ font-size: 15px;
161
+ line-height: 2;
162
+ margin-top: 24px;
163
+ padding-left: 20px;
164
+ }
165
+
166
+ /* Message Bubbles */
167
+ .mcp-chat-message {
168
+ display: flex;
169
+ max-width: 85%;
170
+ }
171
+
172
+ .mcp-chat-message-user {
173
+ align-self: flex-end;
174
+ }
175
+
176
+ .mcp-chat-message-assistant {
177
+ align-self: flex-start;
178
+ }
179
+
180
+ .mcp-chat-message-bubble {
181
+ padding: 14px 18px;
182
+ border-radius: 12px;
183
+ }
184
+
185
+ .mcp-chat-message-user .mcp-chat-message-bubble {
186
+ background: var(--mcp-primary-color);
187
+ color: #fff;
188
+ }
189
+
190
+ .mcp-chat-message-assistant .mcp-chat-message-bubble {
191
+ background: var(--mcp-card-bg);
192
+ border: 1px solid var(--mcp-border);
193
+ color: var(--mcp-text);
194
+ }
195
+
196
+ .mcp-chat-message-content {
197
+ font-size: 15px;
198
+ white-space: pre-wrap;
199
+ line-height: 1.6;
200
+ }
201
+
202
+ .mcp-chat-message-timestamp {
203
+ font-size: 12px;
204
+ opacity: 0.6;
205
+ margin-top: 6px;
206
+ }
207
+
208
+ /* Markdown Content */
209
+ .markdown-content {
210
+ font-size: 15px;
211
+ line-height: 1.6;
212
+ }
213
+
214
+ .markdown-content p {
215
+ margin: 0 0 12px 0;
216
+ }
217
+
218
+ .markdown-content p:last-child {
219
+ margin-bottom: 0;
220
+ }
221
+
222
+ .markdown-content ul,
223
+ .markdown-content ol {
224
+ margin: 0 0 12px 0;
225
+ padding-left: 20px;
226
+ }
227
+
228
+ .markdown-content li {
229
+ margin: 4px 0;
230
+ }
231
+
232
+ .markdown-content code {
233
+ background: rgba(255, 255, 255, 0.1);
234
+ padding: 2px 6px;
235
+ border-radius: 4px;
236
+ font-size: 14px;
237
+ font-family: ui-monospace, monospace;
238
+ }
239
+
240
+ .markdown-content pre {
241
+ background: rgba(255, 255, 255, 0.05);
242
+ padding: 12px;
243
+ border-radius: 8px;
244
+ overflow-x: auto;
245
+ margin: 0 0 12px 0;
246
+ }
247
+
248
+ .markdown-content pre code {
249
+ background: none;
250
+ padding: 0;
251
+ }
252
+
253
+ /* Thinking Steps */
254
+ .mcp-chat-thinking {
255
+ padding: 14px 18px;
256
+ border-radius: 12px;
257
+ background: var(--mcp-card-bg);
258
+ border: 1px solid var(--mcp-border);
259
+ width: 100%;
260
+ }
261
+
262
+ .mcp-chat-thinking-title {
263
+ font-size: 14px;
264
+ font-weight: 600;
265
+ margin-bottom: 8px;
266
+ opacity: 0.8;
267
+ color: var(--mcp-text);
268
+ }
269
+
270
+ .mcp-chat-thinking-steps {
271
+ max-height: 120px;
272
+ overflow-y: auto;
273
+ font-size: 13px;
274
+ line-height: 1.6;
275
+ color: var(--mcp-text-secondary);
276
+ }
277
+
278
+ .mcp-chat-thinking-step {
279
+ padding-bottom: 6px;
280
+ margin-bottom: 6px;
281
+ border-bottom: 1px solid var(--mcp-border);
282
+ }
283
+
284
+ .mcp-chat-thinking-step:last-child {
285
+ border-bottom: none;
286
+ }
287
+
288
+ /* Input Form */
289
+ .mcp-chat-input-form {
290
+ padding: 20px;
291
+ border-top: 1px solid var(--mcp-border);
292
+ display: flex;
293
+ gap: var(--mcp-spacing-small);
294
+ background: var(--mcp-card-bg);
295
+ border-bottom-left-radius: var(--mcp-border-radius);
296
+ border-bottom-right-radius: var(--mcp-border-radius);
297
+ }
298
+
299
+ .mcp-chat-input {
300
+ flex: 1;
301
+ font-size: 15px;
302
+ padding: 12px 16px;
303
+ border-radius: var(--mcp-border-radius-small);
304
+ background: var(--mcp-bg);
305
+ border: 1px solid var(--mcp-border);
306
+ color: var(--mcp-text);
307
+ font-family: inherit;
308
+ outline: none;
309
+ }
310
+
311
+ .mcp-chat-input:focus {
312
+ border-color: var(--mcp-primary-color);
313
+ }
314
+
315
+ .mcp-chat-input:disabled {
316
+ opacity: 0.5;
317
+ cursor: not-allowed;
318
+ }
319
+
320
+ /* Scrollbar styling */
321
+ .mcp-chat-messages::-webkit-scrollbar,
322
+ .mcp-chat-thinking-steps::-webkit-scrollbar {
323
+ width: 8px;
324
+ }
325
+
326
+ .mcp-chat-messages::-webkit-scrollbar-track,
327
+ .mcp-chat-thinking-steps::-webkit-scrollbar-track {
328
+ background: transparent;
329
+ }
330
+
331
+ .mcp-chat-messages::-webkit-scrollbar-thumb,
332
+ .mcp-chat-thinking-steps::-webkit-scrollbar-thumb {
333
+ background: var(--mcp-border);
334
+ border-radius: 4px;
335
+ }
336
+
337
+ .mcp-chat-messages::-webkit-scrollbar-thumb:hover,
338
+ .mcp-chat-thinking-steps::-webkit-scrollbar-thumb:hover {
339
+ background: var(--mcp-text-secondary);
340
+ }
@@ -0,0 +1,26 @@
1
+ /**
2
+ * Shared types for MCP client
3
+ */
4
/**
 * A single entry in the chat transcript shown by the UI.
 */
export interface Message {
    /** Author of the message: the end user or the assistant. */
    role: "user" | "assistant";
    /** Message body text (assistant content appears to be markdown-rendered — TODO confirm). */
    content: string;
    /** Client-side creation time of the message. */
    timestamp: Date;
    /** True while an assistant message is still being streamed in. */
    isStreaming?: boolean;
}
10
/**
 * One progress entry displayed while the assistant is working
 * (purpose inferred from naming and the chat "thinking" panel styles — confirm).
 */
export interface ThinkingStep {
    /** Unique identifier for the step — presumably used as a React list key; verify usage. */
    id: string;
    /** Human-readable description of the step. */
    message: string;
    /** When the step was emitted. */
    timestamp: Date;
}
15
/**
 * Props accepted by the MCP chat React component. All fields are optional.
 */
export interface MCPChatProps {
    /** Domain-specific identifier forwarded with chat requests — TODO confirm against the component. */
    companyNumber?: string;
    /** URL of the backing chat API route — presumably defaults to /api/mcp/chat; verify. */
    apiEndpoint?: string;
    /** Inline styles applied to the chat container element. */
    customStyles?: React.CSSProperties;
    /** Additional CSS class name(s) for the root element. */
    className?: string;
}
21
/**
 * Event emitted over the streaming chat response. `type` indicates which
 * optional payload field is meaningful for a given event.
 */
export interface StreamEvent {
    /** Event kind: progress note, content delta, stream completion, or failure. */
    type: "thinking" | "content" | "done" | "error";
    /** Descriptive text — presumably for "thinking" and "error" events; confirm against the emitter. */
    message?: string;
    /** Incremental response text — presumably for "content" events; confirm against the emitter. */
    chunk?: string;
}
26
+ //# sourceMappingURL=types.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,MAAM,WAAW,OAAO;IACtB,IAAI,EAAE,MAAM,GAAG,WAAW,CAAC;IAC3B,OAAO,EAAE,MAAM,CAAC;IAChB,SAAS,EAAE,IAAI,CAAC;IAChB,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB;AAED,MAAM,WAAW,YAAY;IAC3B,EAAE,EAAE,MAAM,CAAC;IACX,OAAO,EAAE,MAAM,CAAC;IAChB,SAAS,EAAE,IAAI,CAAC;CACjB;AAED,MAAM,WAAW,YAAY;IAC3B,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,YAAY,CAAC,EAAE,KAAK,CAAC,aAAa,CAAC;IACnC,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,UAAU,GAAG,SAAS,GAAG,MAAM,GAAG,OAAO,CAAC;IAChD,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB"}
package/dist/types.js ADDED
@@ -0,0 +1,4 @@
1
+ /**
2
+ * Shared types for MCP client
3
+ */
4
+ export {};
package/package.json ADDED
@@ -0,0 +1,59 @@
1
+ {
2
+ "name": "@nqminds/mcp-client",
3
+ "version": "1.0.4",
4
+ "description": "Reusable MCP client component with AI chat interface",
5
+ "main": "dist/index.js",
6
+ "types": "dist/index.d.ts",
7
+ "exports": {
8
+ ".": {
9
+ "types": "./dist/index.d.ts",
10
+ "default": "./dist/index.js"
11
+ },
12
+ "./server": {
13
+ "types": "./dist/server.d.ts",
14
+ "default": "./dist/server.js"
15
+ },
16
+ "./dist/styles/MCPChat.css": "./dist/styles/MCPChat.css"
17
+ },
18
+ "files": [
19
+ "dist",
20
+ "README.md"
21
+ ],
22
+ "scripts": {
23
+ "build": "npm run version:bump && tsc && npm run copy-css",
24
+ "build:no-version": "tsc && npm run copy-css",
25
+ "copy-css": "mkdir -p dist/styles && cp src/styles/MCPChat.css dist/styles/",
26
+ "dev": "tsc --watch",
27
+ "version:bump": "npm version patch --no-git-tag-version",
28
+ "version:minor": "npm version minor --no-git-tag-version && npm run build:no-version",
29
+ "version:major": "npm version major --no-git-tag-version && npm run build:no-version",
30
+ "release": "npm run build && npm pack",
31
+ "prepublishOnly": "npm run build:no-version"
32
+ },
33
+ "keywords": [
34
+ "mcp",
35
+ "ai",
36
+ "chat",
37
+ "react",
38
+ "component"
39
+ ],
40
+ "author": "Flair",
41
+ "license": "UNLICENSED",
42
+ "peerDependencies": {
43
+ "react": "^18.0.0 || ^19.0.0",
44
+ "react-dom": "^18.0.0 || ^19.0.0",
45
+ "react-markdown": "^9.0.0"
46
+ },
47
+ "dependencies": {
48
+ "@modelcontextprotocol/sdk": "^1.0.4",
49
+ "openai": "^6.15.0"
50
+ },
51
+ "devDependencies": {
52
+ "@types/react": "^19.0.0",
53
+ "@types/react-dom": "^19.0.0",
54
+ "react": "^19.0.0",
55
+ "react-dom": "^19.0.0",
56
+ "react-markdown": "^9.1.0",
57
+ "typescript": "^5.0.0"
58
+ }
59
+ }