drizzle-cube 0.4.19 → 0.4.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/dist/adapters/anthropic-BTkjgFpT.cjs +1 -0
  2. package/dist/adapters/anthropic-CTu9E801.js +126 -0
  3. package/dist/adapters/express/index.cjs +6 -6
  4. package/dist/adapters/express/index.js +73 -69
  5. package/dist/adapters/fastify/index.cjs +6 -6
  6. package/dist/adapters/fastify/index.js +133 -129
  7. package/dist/adapters/google-BAK9pnQf.cjs +2 -0
  8. package/dist/adapters/google-DficVAsJ.js +146 -0
  9. package/dist/adapters/{handler-BV4JuWNW.js → handler-9Rdn7zM2.js} +537 -457
  10. package/dist/adapters/handler-B-tEntiU.cjs +39 -0
  11. package/dist/adapters/hono/index.cjs +6 -6
  12. package/dist/adapters/hono/index.js +199 -195
  13. package/dist/adapters/index-BIMhF5KZ.cjs +23 -0
  14. package/dist/adapters/index-BgCeQBuN.cjs +2 -0
  15. package/dist/adapters/index-C45_meK_.js +719 -0
  16. package/dist/adapters/index-CFEJ62GJ.js +5337 -0
  17. package/dist/adapters/nextjs/index.cjs +5 -5
  18. package/dist/adapters/nextjs/index.js +215 -211
  19. package/dist/adapters/openai-CUSRuKTk.js +131 -0
  20. package/dist/adapters/openai-mLo2MCat.cjs +1 -0
  21. package/dist/client/components/AgenticNotebook/AgentChatPanel.d.ts +3 -0
  22. package/dist/client/components/AgenticNotebook/index.d.ts +6 -0
  23. package/dist/client/hooks/useAgentChat.d.ts +6 -0
  24. package/dist/client/index.js +730 -697
  25. package/dist/client/index.js.map +1 -1
  26. package/dist/client/styles.css +1 -1
  27. package/dist/client-bundle-stats.html +1 -1
  28. package/dist/server/anthropic-BTkjgFpT.cjs +1 -0
  29. package/dist/server/anthropic-CTu9E801.js +126 -0
  30. package/dist/server/google-BAK9pnQf.cjs +2 -0
  31. package/dist/server/google-DficVAsJ.js +146 -0
  32. package/dist/server/index-BIMhF5KZ.cjs +23 -0
  33. package/dist/server/index-BgCeQBuN.cjs +2 -0
  34. package/dist/server/index-C45_meK_.js +719 -0
  35. package/dist/server/index-CFEJ62GJ.js +5337 -0
  36. package/dist/server/index.cjs +51 -45
  37. package/dist/server/index.d.ts +49 -10
  38. package/dist/server/index.js +1978 -1898
  39. package/dist/server/openai-CUSRuKTk.js +131 -0
  40. package/dist/server/openai-mLo2MCat.cjs +1 -0
  41. package/package.json +12 -2
  42. package/dist/adapters/handler-D4MVKkVy.cjs +0 -33
@@ -0,0 +1,131 @@
1
+ class u {
2
+ client;
3
+ apiKey;
4
+ baseURL;
5
+ initialized = !1;
6
+ constructor(e, t) {
7
+ this.apiKey = e, this.baseURL = t?.baseURL;
8
+ }
9
+ async ensureClient() {
10
+ if (this.initialized) return;
11
+ let e;
12
+ try {
13
+ const s = await import(
14
+ /* webpackIgnore: true */
15
+ "./index-CFEJ62GJ.js"
16
+ );
17
+ e = s.default || s.OpenAI || s;
18
+ } catch {
19
+ throw new Error("openai is required for the OpenAI provider. Install it with: npm install openai");
20
+ }
21
+ const t = { apiKey: this.apiKey };
22
+ this.baseURL && (t.baseURL = this.baseURL), this.client = new e(t), this.initialized = !0;
23
+ }
24
+ async createStream(e) {
25
+ await this.ensureClient();
26
+ const { messages: t } = this.formatMessages(e.messages, e.system);
27
+ return this.client.chat.completions.create({
28
+ model: e.model,
29
+ max_completion_tokens: e.maxTokens,
30
+ tools: this.formatTools(e.tools),
31
+ messages: t,
32
+ stream: !0,
33
+ stream_options: { include_usage: !0 }
34
+ });
35
+ }
36
+ async *parseStreamEvents(e) {
37
+ const t = /* @__PURE__ */ new Map();
38
+ for await (const s of e) {
39
+ const n = s;
40
+ n.usage && (yield {
41
+ type: "message_meta",
42
+ inputTokens: n.usage.prompt_tokens,
43
+ outputTokens: n.usage.completion_tokens,
44
+ stopReason: ""
45
+ // stop reason comes from choices
46
+ });
47
+ const l = n.choices?.[0];
48
+ if (!l) continue;
49
+ const r = l.delta;
50
+ if (r) {
51
+ if (r.content && (yield { type: "text_delta", text: r.content }), r.tool_calls)
52
+ for (const i of r.tool_calls) {
53
+ const a = i.index ?? 0;
54
+ if (i.id && (t.set(a, { id: i.id, name: i.function?.name || "", arguments: "" }), yield { type: "tool_use_start", id: i.id, name: i.function?.name || "" }), i.function?.name && t.has(a)) {
55
+ const o = t.get(a);
56
+ o.name || (o.name = i.function.name);
57
+ }
58
+ if (i.function?.arguments) {
59
+ const o = t.get(a);
60
+ o && (o.arguments += i.function.arguments, yield { type: "tool_input_delta", json: i.function.arguments });
61
+ }
62
+ }
63
+ if (l.finish_reason) {
64
+ for (const [i, a] of t) {
65
+ let o = {};
66
+ try {
67
+ a.arguments && (o = JSON.parse(a.arguments));
68
+ } catch {
69
+ }
70
+ yield { type: "tool_use_end", id: a.id, input: o }, t.delete(i);
71
+ }
72
+ yield {
73
+ type: "message_meta",
74
+ stopReason: l.finish_reason
75
+ };
76
+ }
77
+ }
78
+ }
79
+ }
80
+ formatTools(e) {
81
+ return e.map((t) => ({
82
+ type: "function",
83
+ function: {
84
+ name: t.name,
85
+ description: t.description,
86
+ parameters: t.parameters
87
+ }
88
+ }));
89
+ }
90
+ formatMessages(e, t) {
91
+ const s = [{ role: "system", content: t }];
92
+ for (const n of e)
93
+ if (n.role === "user")
94
+ s.push({ role: "user", content: typeof n.content == "string" ? n.content : JSON.stringify(n.content) });
95
+ else if (n.role === "assistant")
96
+ if (typeof n.content == "string")
97
+ s.push({ role: "assistant", content: n.content });
98
+ else {
99
+ const l = n.content, r = l.filter((o) => o.type === "text").map((o) => o.text).join(""), i = l.filter((o) => o.type === "tool_use").map((o) => ({
100
+ id: o.id,
101
+ type: "function",
102
+ function: {
103
+ name: o.name,
104
+ arguments: JSON.stringify(o.input || {})
105
+ }
106
+ })), a = { role: "assistant" };
107
+ r && (a.content = r), i.length > 0 && (a.tool_calls = i), s.push(a);
108
+ }
109
+ else n.role === "tool" ? s.push(n) : n.role === "tool_result" && s.push({ role: "user", content: typeof n.content == "string" ? n.content : JSON.stringify(n.content) });
110
+ return { messages: s };
111
+ }
112
+ formatToolResults(e) {
113
+ return e.map((t) => ({
114
+ role: "tool",
115
+ tool_call_id: t.toolUseId,
116
+ content: t.content
117
+ }));
118
+ }
119
+ shouldContinue(e) {
120
+ return e === "tool_calls";
121
+ }
122
+ formatError(e) {
123
+ if (!e || !(e instanceof Error))
124
+ return "Something went wrong. Please try again.";
125
+ const t = e.message || "", s = e;
126
+ return s.status === 429 ? "Too many requests. Please wait a moment and try again." : s.status === 401 ? "Authentication failed. Please check your API key configuration." : s.status === 503 || s.status === 502 ? "The AI service is temporarily unavailable. Please try again in a moment." : s.status === 400 ? "There was a problem with the request. Please try again." : t.startsWith("{") || t.startsWith("Error: {") ? "The AI service encountered an error. Please try again." : t;
127
+ }
128
+ }
129
+ export {
130
+ u as OpenAIProvider
131
+ };
@@ -0,0 +1 @@
1
+ "use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});class c{client;apiKey;baseURL;initialized=!1;constructor(e,t){this.apiKey=e,this.baseURL=t?.baseURL}async ensureClient(){if(this.initialized)return;let e;try{const s=await Promise.resolve().then(()=>require("./index-BIMhF5KZ.cjs"));e=s.default||s.OpenAI||s}catch{throw new Error("openai is required for the OpenAI provider. Install it with: npm install openai")}const t={apiKey:this.apiKey};this.baseURL&&(t.baseURL=this.baseURL),this.client=new e(t),this.initialized=!0}async createStream(e){await this.ensureClient();const{messages:t}=this.formatMessages(e.messages,e.system);return this.client.chat.completions.create({model:e.model,max_completion_tokens:e.maxTokens,tools:this.formatTools(e.tools),messages:t,stream:!0,stream_options:{include_usage:!0}})}async*parseStreamEvents(e){const t=new Map;for await(const s of e){const n=s;n.usage&&(yield{type:"message_meta",inputTokens:n.usage.prompt_tokens,outputTokens:n.usage.completion_tokens,stopReason:""});const l=n.choices?.[0];if(!l)continue;const r=l.delta;if(r){if(r.content&&(yield{type:"text_delta",text:r.content}),r.tool_calls)for(const i of r.tool_calls){const a=i.index??0;if(i.id&&(t.set(a,{id:i.id,name:i.function?.name||"",arguments:""}),yield{type:"tool_use_start",id:i.id,name:i.function?.name||""}),i.function?.name&&t.has(a)){const o=t.get(a);o.name||(o.name=i.function.name)}if(i.function?.arguments){const o=t.get(a);o&&(o.arguments+=i.function.arguments,yield{type:"tool_input_delta",json:i.function.arguments})}}if(l.finish_reason){for(const[i,a]of t){let o={};try{a.arguments&&(o=JSON.parse(a.arguments))}catch{}yield{type:"tool_use_end",id:a.id,input:o},t.delete(i)}yield{type:"message_meta",stopReason:l.finish_reason}}}}}formatTools(e){return e.map(t=>({type:"function",function:{name:t.name,description:t.description,parameters:t.parameters}}))}formatMessages(e,t){const s=[{role:"system",content:t}];for(const n of 
e)if(n.role==="user")s.push({role:"user",content:typeof n.content=="string"?n.content:JSON.stringify(n.content)});else if(n.role==="assistant")if(typeof n.content=="string")s.push({role:"assistant",content:n.content});else{const l=n.content,r=l.filter(o=>o.type==="text").map(o=>o.text).join(""),i=l.filter(o=>o.type==="tool_use").map(o=>({id:o.id,type:"function",function:{name:o.name,arguments:JSON.stringify(o.input||{})}})),a={role:"assistant"};r&&(a.content=r),i.length>0&&(a.tool_calls=i),s.push(a)}else n.role==="tool"?s.push(n):n.role==="tool_result"&&s.push({role:"user",content:typeof n.content=="string"?n.content:JSON.stringify(n.content)});return{messages:s}}formatToolResults(e){return e.map(t=>({role:"tool",tool_call_id:t.toolUseId,content:t.content}))}shouldContinue(e){return e==="tool_calls"}formatError(e){if(!e||!(e instanceof Error))return"Something went wrong. Please try again.";const t=e.message||"",s=e;return s.status===429?"Too many requests. Please wait a moment and try again.":s.status===401?"Authentication failed. Please check your API key configuration.":s.status===503||s.status===502?"The AI service is temporarily unavailable. Please try again in a moment.":s.status===400?"There was a problem with the request. Please try again.":t.startsWith("{")||t.startsWith("Error: {")?"The AI service encountered an error. Please try again.":t}}exports.OpenAIProvider=c;
@@ -2,6 +2,9 @@ import { default as React } from 'react';
2
2
  interface AgentChatPanelProps {
3
3
  agentEndpoint?: string;
4
4
  agentApiKey?: string;
5
+ agentProvider?: string;
6
+ agentModel?: string;
7
+ agentProviderEndpoint?: string;
5
8
  onClear?: () => void;
6
9
  /** Called when the agent saves a dashboard. Presence enables the "Save as Dashboard" button. */
7
10
  onDashboardSaved?: (data: {
@@ -8,6 +8,12 @@ export interface AgenticNotebookProps {
8
8
  agentEndpoint?: string;
9
9
  /** Client-side API key (for demo/try-site use) */
10
10
  agentApiKey?: string;
11
+ /** Override LLM provider (anthropic | openai | google) */
12
+ agentProvider?: string;
13
+ /** Override LLM model (e.g. 'gpt-4o', 'gemini-2.0-flash') */
14
+ agentModel?: string;
15
+ /** Override provider endpoint URL (for OpenAI-compatible services) */
16
+ agentProviderEndpoint?: string;
11
17
  /** Callback when notebook state changes (for persistence) */
12
18
  onSave?: (config: NotebookConfig) => void | Promise<void>;
13
19
  /** Callback when dirty state changes */
@@ -4,6 +4,12 @@ export interface UseAgentChatOptions {
4
4
  agentEndpoint?: string;
5
5
  /** Client-side API key for demo/try-site use */
6
6
  agentApiKey?: string;
7
+ /** Override LLM provider (anthropic | openai | google) */
8
+ agentProvider?: string;
9
+ /** Override LLM model (e.g. 'gpt-4o', 'gemini-2.0-flash') */
10
+ agentModel?: string;
11
+ /** Override provider endpoint URL (for OpenAI-compatible services) */
12
+ agentProviderEndpoint?: string;
7
13
  /** Called when agent adds a portlet to the notebook */
8
14
  onAddPortlet: (data: PortletBlock) => void;
9
15
  /** Called when agent adds a markdown block to the notebook */