blink 0.1.74 → 0.1.76

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/dist/browser/agent/client/index.cjs +11 -11
  2. package/dist/browser/agent/client/index.d.cts +3 -3
  3. package/dist/browser/agent/client/index.d.ts +3 -3
  4. package/dist/browser/agent/client/index.js +11 -11
  5. package/dist/browser/agent/index.browser.d.cts +2 -2
  6. package/dist/browser/agent/index.browser.d.ts +2 -2
  7. package/dist/browser/{chat-B5eFQu26.d.cts → chat-B3FNVJ5h.d.cts} +1 -1
  8. package/dist/browser/{chat-BN7SYTWY.d.ts → chat-DDcZEZzi.d.ts} +1 -1
  9. package/dist/browser/control/index.d.cts +1 -1
  10. package/dist/browser/control/index.d.ts +1 -1
  11. package/dist/browser/{index-B5faDAC2.d.ts → index-B5ywu36Q.d.ts} +2 -2
  12. package/dist/browser/{index-C-GJWB97.d.cts → index-CiKnRuyK.d.cts} +2 -2
  13. package/dist/browser/{index.browser-CnGab8Yp.d.cts → index.browser-Bivj_NTA.d.cts} +1 -1
  14. package/dist/browser/{index.browser-BUMCZUeQ.d.ts → index.browser-BzfJXoff.d.ts} +1 -1
  15. package/dist/browser/{index.node-mda_IL6K.cjs → index.node-BrnIuz4k.cjs} +1 -1
  16. package/dist/browser/{index.node-DJv-HF6j.js → index.node-CbQFCWTr.js} +1 -1
  17. package/dist/browser/react/index.cjs +1 -1
  18. package/dist/browser/react/index.d.cts +3 -3
  19. package/dist/browser/react/index.d.ts +3 -3
  20. package/dist/browser/react/index.js +1 -1
  21. package/dist/cli/auth-BfmMOPP_.js +74 -0
  22. package/dist/cli/{chat-L0VBV1Zr.js → chat-CChZ6Dgt.js} +1 -1
  23. package/dist/cli/{connect-Cxa-uIEb.js → connect-D8IAxl-3.js} +3 -3
  24. package/dist/cli/connect-DSyrG7KH.js +1 -0
  25. package/dist/cli/{dev-CMwHkKBI.js → dev-IdmzRYNF.js} +62 -56
  26. package/dist/cli/{getMachineId-bsd-BZVr5tA4.js → getMachineId-bsd-Qx2HBGoH.js} +1 -1
  27. package/dist/cli/{getMachineId-darwin-CovNSxTh.js → getMachineId-darwin--C6ZfZar.js} +1 -1
  28. package/dist/cli/{getMachineId-linux-UJ7XEIYa.js → getMachineId-linux-DLLDRjuc.js} +1 -1
  29. package/dist/cli/{getMachineId-unsupported-DfUiEY7G.js → getMachineId-unsupported-C3vmCtXX.js} +1 -1
  30. package/dist/cli/{getMachineId-win-DYgilU22.js → getMachineId-win-CPTJv39f.js} +1 -1
  31. package/dist/cli/{util-D3Iuwmav.js → ignore-BSx5uoz9.js} +1 -1
  32. package/dist/cli/index.js +4 -4
  33. package/dist/cli/{init-B6vWokUI.js → init-D-601hh-.js} +2 -2
  34. package/dist/cli/login-CxmH7hHe.js +1 -0
  35. package/dist/cli/undici-DD9e2s8L.js +1 -0
  36. package/dist/node/agent/index.node.cjs +26 -6
  37. package/dist/node/agent/index.node.d.cts +2 -2
  38. package/dist/node/agent/index.node.d.ts +2 -2
  39. package/dist/node/agent/index.node.js +26 -6
  40. package/dist/node/{index.node-iaeCqAo6.d.cts → index.node-rD4Zd2xs.d.cts} +22 -4
  41. package/dist/node/{index.node-BBpGHHxe.d.ts → index.node-CI1vYz48.d.ts} +22 -4
  42. package/dist/node/test.d.cts +1 -1
  43. package/dist/node/test.d.ts +1 -1
  44. package/package.json +3 -2
  45. package/dist/cli/auth-BUNIOupD.js +0 -30
  46. package/dist/cli/connect-BHyGYU8L.js +0 -1
  47. package/dist/cli/dist-NqrnQGst.js +0 -45
  48. package/dist/cli/login-DKW_KVCA.js +0 -1
  49. package/dist/cli/undici-jJcag_Yq.js +0 -1
  50. /package/dist/cli/{devtools-BS9tk1Y9.js → devtools-BLozvXLU.js} +0 -0
  51. /package/dist/cli/{esm-DmR1E1ox.js → esm-Dh_XeXMF.js} +0 -0
  52. /package/dist/cli/{events-CWgtj8lb.js → events-NKQ6g4j7.js} +0 -0
  53. /package/dist/cli/{execAsync-q2CSWc1b.js → execAsync-Bms0M8aD.js} +0 -0
  54. /package/dist/cli/{open-CSMQaj0E.js → open-FpexaS6S.js} +0 -0
  55. /package/dist/cli/{undici-BG07ys6c.js → undici-DSA55kyn.js} +0 -0
@@ -1,8 +1,9 @@
+ import * as http from "http";
+ import { InferToolInput, InferToolOutput, InferUIMessageChunk, StreamTextResult, Tool, ToolSet, UIMessage } from "ai";
  import * as hono_utils_http_status0 from "hono/utils/http-status";
  import * as hono_types0 from "hono/types";
  import * as hono_hono_base0 from "hono/hono-base";
- import { InferToolInput, InferToolOutput, InferUIMessageChunk, StreamTextResult, Tool, ToolSet, UIMessage } from "ai";
- import * as http from "http";
+ import * as _ai_sdk_provider0 from "@ai-sdk/provider";
 
  //#region src/agent/context.d.ts
  interface Context {
@@ -120,7 +121,7 @@ interface MessageOptions {
  readonly createdAt?: Date;
  }
  interface ChatEvent<MESSAGE extends UIMessage> {
- readonly key: string;
+ readonly chat: Chat;
  readonly messages: MESSAGE[];
  readonly abortSignal?: AbortSignal;
  readonly context: Context;
@@ -348,6 +349,23 @@ declare const api: hono_hono_base0.HonoBase<{
  };
  };
  }), "/">;
+ /**
+ * model returns an AI-SDK model provider that can be
+ * used with `streamText`, `generateText`, etc.
+ *
+ * It simply proxies to the Vercel AI Gateway, with no
+ * additional cost.
+ *
+ * Find model names at: https://vercel.com/ai-gateway/models.
+ *
+ * Common:
+ * "anthropic/claude-sonnet-4.5"
+ * "anthropic/claude-sonnet-4"
+ * "openai/gpt-5"
+ *
+ * @param model the model name. browse models at: https://vercel.com/ai-gateway/models.
+ */
+ declare const model: (model: string) => _ai_sdk_provider0.LanguageModelV2;
  //#endregion
  //#region src/agent/tools.d.ts
  /**
@@ -457,4 +475,4 @@ type StreamResponseFormat = "ui-message" | "openai-chat" | "openai-response" | "
  declare const StreamResponseFormatHeader = "x-blink-stream-response-format";
  declare function withResponseFormat(response: Response, format: StreamResponseFormat): Response;
  //#endregion
- export { Agent, Chat, ChatBehavior, ChatContext, ChatEvent, ChatHandler, ChatResponse, Context, ContextFromTools, EnsuredChat, ErrorHandler, ExtractUIOptions, MessageOptions, RequestHandler, ServeOptions, StoreContext, StreamResponseFormat, StreamResponseFormatHeader, ToolApprovalOutput, ToolSetWithApproval, ToolWithApproval, ToolWithContext, UIEvent, UIHandler, UIOptionSelect, UIOptionSelectValue, UIOptions, UIOptionsSchema, WithUIOptions, agent, api, isToolApprovalOutput, lastUIOptions, toolWithApproval, tools, withResponseFormat };
+ export { Agent, Chat, ChatBehavior, ChatContext, ChatEvent, ChatHandler, ChatResponse, Context, ContextFromTools, EnsuredChat, ErrorHandler, ExtractUIOptions, MessageOptions, RequestHandler, ServeOptions, StoreContext, StreamResponseFormat, StreamResponseFormatHeader, ToolApprovalOutput, ToolSetWithApproval, ToolWithApproval, ToolWithContext, UIEvent, UIHandler, UIOptionSelect, UIOptionSelectValue, UIOptions, UIOptionsSchema, WithUIOptions, agent, api, isToolApprovalOutput, lastUIOptions, model, toolWithApproval, tools, withResponseFormat };
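The headline addition in this declaration diff is the new `model` export, which returns an AI SDK `LanguageModelV2` proxied through the Vercel AI Gateway. A minimal usage sketch, assuming the declaration above is reachable from a `blink/agent` entry point (the exact import specifier is not shown in this diff):

// Sketch only: "blink/agent" is an assumed import path; check the package's export map.
import { streamText } from "ai";
import { model } from "blink/agent";

const result = streamText({
  // Gateway model names use "provider/model" form, per the JSDoc above.
  model: model("anthropic/claude-sonnet-4.5"),
  prompt: "Summarize the changes in blink 0.1.76.",
});

for await (const chunk of result.textStream) {
  process.stdout.write(chunk);
}

Per the JSDoc, the same identifier format should work for any model listed in the gateway catalog; `generateText` would accept the returned model the same way.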
@@ -1,8 +1,9 @@
- import * as http from "http";
- import { InferToolInput, InferToolOutput, InferUIMessageChunk, StreamTextResult, Tool, ToolSet, UIMessage } from "ai";
  import * as hono_utils_http_status0 from "hono/utils/http-status";
  import * as hono_types0 from "hono/types";
  import * as hono_hono_base0 from "hono/hono-base";
+ import * as _ai_sdk_provider0 from "@ai-sdk/provider";
+ import { InferToolInput, InferToolOutput, InferUIMessageChunk, StreamTextResult, Tool, ToolSet, UIMessage } from "ai";
+ import * as http from "http";
 
  //#region src/agent/context.d.ts
  interface Context {
@@ -120,7 +121,7 @@ interface MessageOptions {
  readonly createdAt?: Date;
  }
  interface ChatEvent<MESSAGE extends UIMessage> {
- readonly key: string;
+ readonly chat: Chat;
  readonly messages: MESSAGE[];
  readonly abortSignal?: AbortSignal;
  readonly context: Context;
@@ -348,6 +349,23 @@ declare const api: hono_hono_base0.HonoBase<{
  };
  };
  }), "/">;
+ /**
+ * model returns an AI-SDK model provider that can be
+ * used with `streamText`, `generateText`, etc.
+ *
+ * It simply proxies to the Vercel AI Gateway, with no
+ * additional cost.
+ *
+ * Find model names at: https://vercel.com/ai-gateway/models.
+ *
+ * Common:
+ * "anthropic/claude-sonnet-4.5"
+ * "anthropic/claude-sonnet-4"
+ * "openai/gpt-5"
+ *
+ * @param model the model name. browse models at: https://vercel.com/ai-gateway/models.
+ */
+ declare const model: (model: string) => _ai_sdk_provider0.LanguageModelV2;
  //#endregion
  //#region src/agent/tools.d.ts
  /**
@@ -457,4 +475,4 @@ type StreamResponseFormat = "ui-message" | "openai-chat" | "openai-response" | "
  declare const StreamResponseFormatHeader = "x-blink-stream-response-format";
  declare function withResponseFormat(response: Response, format: StreamResponseFormat): Response;
  //#endregion
- export { Agent, Chat, ChatBehavior, ChatContext, ChatEvent, ChatHandler, ChatResponse, Context, ContextFromTools, EnsuredChat, ErrorHandler, ExtractUIOptions, MessageOptions, RequestHandler, ServeOptions, StoreContext, StreamResponseFormat, StreamResponseFormatHeader, ToolApprovalOutput, ToolSetWithApproval, ToolWithApproval, ToolWithContext, UIEvent, UIHandler, UIOptionSelect, UIOptionSelectValue, UIOptions, UIOptionsSchema, WithUIOptions, agent, api, isToolApprovalOutput, lastUIOptions, toolWithApproval, tools, withResponseFormat };
+ export { Agent, Chat, ChatBehavior, ChatContext, ChatEvent, ChatHandler, ChatResponse, Context, ContextFromTools, EnsuredChat, ErrorHandler, ExtractUIOptions, MessageOptions, RequestHandler, ServeOptions, StoreContext, StreamResponseFormat, StreamResponseFormatHeader, ToolApprovalOutput, ToolSetWithApproval, ToolWithApproval, ToolWithContext, UIEvent, UIHandler, UIOptionSelect, UIOptionSelectValue, UIOptions, UIOptionsSchema, WithUIOptions, agent, api, isToolApprovalOutput, lastUIOptions, model, toolWithApproval, tools, withResponseFormat };
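The other behavioral change in these declarations is to `ChatEvent`: the `key: string` field is replaced by a `chat: Chat` object. A hypothetical handler written against the new shape, using only the fields visible in the diff (the `blink/agent` import path and the surrounding wiring are assumptions):

// Illustrative only: shows the ChatEvent field change, not blink's actual handler API.
import type { UIMessage } from "ai";
import type { Chat, ChatEvent } from "blink/agent";

// 0.1.74: handlers read `event.key` (a string identifier).
// 0.1.76: the full Chat object is available as `event.chat`.
async function handleChat(event: ChatEvent<UIMessage>): Promise<Chat> {
  if (event.abortSignal?.aborted) {
    throw new Error("chat aborted before handling started");
  }
  console.log(`received ${event.messages.length} message(s)`);
  return event.chat;
}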
@@ -1,4 +1,4 @@
- import { Chat, UIOptions, UIOptionsSchema } from "./index.node-iaeCqAo6.cjs";
+ import { Chat, UIOptions, UIOptionsSchema } from "./index.node-rD4Zd2xs.cjs";
  import { UIMessage, UIMessageChunk } from "ai";
 
  //#region src/agent/client/index.d.ts
@@ -1,4 +1,4 @@
- import { Chat, UIOptions, UIOptionsSchema } from "./index.node-BBpGHHxe.js";
+ import { Chat, UIOptions, UIOptionsSchema } from "./index.node-CI1vYz48.js";
  import { UIMessage, UIMessageChunk } from "ai";
 
  //#region src/agent/client/index.d.ts
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "blink",
- "version": "0.1.74",
+ "version": "0.1.76",
  "description": "Blink is a JavaScript runtime for building and deploying AI agents.",
  "type": "module",
  "bin": {
@@ -67,7 +67,7 @@
  "@ai-sdk/openai-compatible": "^1.0.19",
  "@ai-sdk/react": "^2.0.35",
  "@ai-sdk/xai": "^2.0.16",
- "@blink.so/api": "0.0.8",
+ "@blink.so/api": "^0.0.11",
  "@clack/prompts": "^0.11.0",
  "@hono/node-server": "^1.19.3",
  "@hugodutka/gemini-cli": "^0.6.0-nightly-20250912-5",
@@ -78,6 +78,7 @@
  "eventsource-parser": "^3.0.6",
  "filenamify": "^7.0.0",
  "hono": "^4.9.8",
+ "ignore": "^7.0.2",
  "ink": "^6.2.3",
  "ink-spinner": "^5.0.0",
  "ink-text-input": "^6.0.0",