langchain 0.0.177 → 0.0.178

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/chat_models/iflytek_xinghuo/web.cjs +1 -0
  2. package/chat_models/iflytek_xinghuo/web.d.ts +1 -0
  3. package/chat_models/iflytek_xinghuo/web.js +1 -0
  4. package/chat_models/iflytek_xinghuo.cjs +1 -0
  5. package/chat_models/iflytek_xinghuo.d.ts +1 -0
  6. package/chat_models/iflytek_xinghuo.js +1 -0
  7. package/dist/chat_models/cloudflare_workersai.cjs +70 -24
  8. package/dist/chat_models/cloudflare_workersai.d.ts +6 -2
  9. package/dist/chat_models/cloudflare_workersai.js +71 -25
  10. package/dist/chat_models/iflytek_xinghuo/common.cjs +335 -0
  11. package/dist/chat_models/iflytek_xinghuo/common.d.ts +165 -0
  12. package/dist/chat_models/iflytek_xinghuo/common.js +331 -0
  13. package/dist/chat_models/iflytek_xinghuo/index.cjs +35 -0
  14. package/dist/chat_models/iflytek_xinghuo/index.d.ts +5 -0
  15. package/dist/chat_models/iflytek_xinghuo/index.js +28 -0
  16. package/dist/chat_models/iflytek_xinghuo/web.cjs +30 -0
  17. package/dist/chat_models/iflytek_xinghuo/web.d.ts +5 -0
  18. package/dist/chat_models/iflytek_xinghuo/web.js +26 -0
  19. package/dist/graphs/neo4j_graph.cjs +36 -5
  20. package/dist/graphs/neo4j_graph.js +14 -3
  21. package/dist/llms/cloudflare_workersai.cjs +59 -13
  22. package/dist/llms/cloudflare_workersai.d.ts +9 -3
  23. package/dist/llms/cloudflare_workersai.js +59 -13
  24. package/dist/load/import_constants.cjs +2 -0
  25. package/dist/load/import_constants.js +2 -0
  26. package/dist/prompts/chat.cjs +8 -0
  27. package/dist/prompts/chat.d.ts +5 -0
  28. package/dist/prompts/chat.js +8 -0
  29. package/dist/util/event-source-parse.cjs +20 -1
  30. package/dist/util/event-source-parse.d.ts +2 -0
  31. package/dist/util/event-source-parse.js +18 -0
  32. package/dist/util/iflytek_websocket_stream.cjs +81 -0
  33. package/dist/util/iflytek_websocket_stream.d.ts +27 -0
  34. package/dist/util/iflytek_websocket_stream.js +77 -0
  35. package/package.json +22 -1
package/dist/graphs/neo4j_graph.cjs
@@ -1,10 +1,30 @@
  "use strict";
- var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+ }) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+ }));
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+ }) : function(o, v) {
+ o["default"] = v;
+ });
+ var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
  };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.Neo4jGraph = void 0;
- const neo4j_driver_1 = __importDefault(require("neo4j-driver"));
+ const neo4j_driver_1 = __importStar(require("neo4j-driver"));
  /**
  * @security *Security note*: Make sure that the database connection uses credentials
  * that are narrowly-scoped to only include necessary permissions.
@@ -61,7 +81,12 @@ class Neo4jGraph {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  }
  catch (error) {
- throw new Error(`Error: ${error.message}`);
+ const message = [
+ "Could not use APOC procedures.",
+ "Please ensure the APOC plugin is installed in Neo4j and that",
+ "'apoc.meta.data()' is allowed in Neo4j configuration",
+ ].join("\n");
+ throw new Error(message);
  }
  finally {
  console.log("Schema refreshed successfully.");
@@ -78,9 +103,15 @@ class Neo4jGraph {
  database: this.database,
  });
  return toObjects(result.records);
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
  }
  catch (error) {
- // ignore errors
+ if (
+ // eslint-disable-next-line
+ error instanceof neo4j_driver_1.Neo4jError &&
+ error.code === "Neo.ClientError.Procedure.ProcedureNotFound") {
+ throw new Error("Procedure not found in Neo4j.");
+ }
  }
  return undefined;
  }
package/dist/graphs/neo4j_graph.js
@@ -1,4 +1,4 @@
- import neo4j from "neo4j-driver";
+ import neo4j, { Neo4jError } from "neo4j-driver";
  /**
  * @security *Security note*: Make sure that the database connection uses credentials
  * that are narrowly-scoped to only include necessary permissions.
@@ -55,7 +55,12 @@ export class Neo4jGraph {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  }
  catch (error) {
- throw new Error(`Error: ${error.message}`);
+ const message = [
+ "Could not use APOC procedures.",
+ "Please ensure the APOC plugin is installed in Neo4j and that",
+ "'apoc.meta.data()' is allowed in Neo4j configuration",
+ ].join("\n");
+ throw new Error(message);
  }
  finally {
  console.log("Schema refreshed successfully.");
@@ -72,9 +77,15 @@ export class Neo4jGraph {
  database: this.database,
  });
  return toObjects(result.records);
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
  }
  catch (error) {
- // ignore errors
+ if (
+ // eslint-disable-next-line
+ error instanceof Neo4jError &&
+ error.code === "Neo.ClientError.Procedure.ProcedureNotFound") {
+ throw new Error("Procedure not found in Neo4j.");
+ }
  }
  return undefined;
  }
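
The two Neo4jGraph hunks above change how schema-refresh failures surface: a missing APOC plugin now raises a descriptive message instead of the raw driver error, and an unknown procedure is no longer silently swallowed. A minimal sketch of how a caller hits these paths, assuming the usual Neo4jGraph.initialize entrypoint and placeholder connection details:

import { Neo4jGraph } from "langchain/graphs/neo4j_graph";

// Placeholder credentials; keep them narrowly scoped, per the security note above.
const graph = await Neo4jGraph.initialize({
  url: "neo4j://localhost:7687",
  username: "neo4j",
  password: "password",
});

// initialize() refreshes the schema via apoc.meta.data(); without the APOC plugin
// this now throws "Could not use APOC procedures. ..." rather than a generic
// `Error: ${error.message}`, and unknown procedures raise "Procedure not found in Neo4j."
console.log(graph.getSchema());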
package/dist/llms/cloudflare_workersai.cjs
@@ -3,6 +3,8 @@ Object.defineProperty(exports, "__esModule", { value: true });
  exports.CloudflareWorkersAI = void 0;
  const base_js_1 = require("./base.cjs");
  const env_js_1 = require("../util/env.cjs");
+ const index_js_1 = require("../schema/index.cjs");
+ const event_source_parse_js_1 = require("../util/event-source-parse.cjs");
  /**
  * Class representing the CloudflareWorkersAI language model. It extends the LLM (Large
  * Language Model) class, providing a standard interface for interacting
@@ -38,6 +40,12 @@ class CloudflareWorkersAI extends base_js_1.LLM {
  writable: true,
  value: void 0
  });
+ Object.defineProperty(this, "streaming", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: false
+ });
  Object.defineProperty(this, "lc_serializable", {
  enumerable: true,
  configurable: true,
@@ -45,6 +53,7 @@ class CloudflareWorkersAI extends base_js_1.LLM {
  value: true
  });
  this.model = fields?.model ?? this.model;
+ this.streaming = fields?.streaming ?? this.streaming;
  this.cloudflareAccountId =
  fields?.cloudflareAccountId ??
  (0, env_js_1.getEnvironmentVariable)("CLOUDFLARE_ACCOUNT_ID");
@@ -87,23 +96,15 @@ class CloudflareWorkersAI extends base_js_1.LLM {
  _llmType() {
  return "cloudflare";
  }
- /** Call out to CloudflareWorkersAI's complete endpoint.
- Args:
- prompt: The prompt to pass into the model.
- Returns:
- The string generated by the model.
- Example:
- let response = CloudflareWorkersAI.call("Tell me a joke.");
- */
- async _call(prompt, options) {
+ async _request(prompt, options, stream) {
  this.validateEnvironment();
  const url = `${this.baseUrl}/${this.model}`;
  const headers = {
  Authorization: `Bearer ${this.cloudflareApiToken}`,
  "Content-Type": "application/json",
  };
- const data = { prompt };
- const responseData = await this.caller.call(async () => {
+ const data = { prompt, stream };
+ return this.caller.call(async () => {
  const response = await fetch(url, {
  method: "POST",
  headers,
@@ -116,9 +117,54 @@ class CloudflareWorkersAI extends base_js_1.LLM {
  error.response = response;
  throw error;
  }
- return response.json();
+ return response;
  });
- return responseData.result.response;
+ }
+ async *_streamResponseChunks(prompt, options, runManager) {
+ const response = await this._request(prompt, options, true);
+ if (!response.body) {
+ throw new Error("Empty response from Cloudflare. Please try again.");
+ }
+ const stream = (0, event_source_parse_js_1.convertEventStreamToIterableReadableDataStream)(response.body);
+ for await (const chunk of stream) {
+ if (chunk !== "[DONE]") {
+ const parsedChunk = JSON.parse(chunk);
+ const generationChunk = new index_js_1.GenerationChunk({
+ text: parsedChunk.response,
+ });
+ yield generationChunk;
+ // eslint-disable-next-line no-void
+ void runManager?.handleLLMNewToken(generationChunk.text ?? "");
+ }
+ }
+ }
+ /** Call out to CloudflareWorkersAI's complete endpoint.
+ Args:
+ prompt: The prompt to pass into the model.
+ Returns:
+ The string generated by the model.
+ Example:
+ let response = CloudflareWorkersAI.call("Tell me a joke.");
+ */
+ async _call(prompt, options, runManager) {
+ if (!this.streaming) {
+ const response = await this._request(prompt, options);
+ const responseData = await response.json();
+ return responseData.result.response;
+ }
+ else {
+ const stream = this._streamResponseChunks(prompt, options, runManager);
+ let finalResult;
+ for await (const chunk of stream) {
+ if (finalResult === undefined) {
+ finalResult = chunk;
+ }
+ else {
+ finalResult = finalResult.concat(chunk);
+ }
+ }
+ return finalResult?.text ?? "";
+ }
  }
  }
  exports.CloudflareWorkersAI = CloudflareWorkersAI;
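
The Cloudflare Workers AI LLM gains a `streaming` option plus the internal `_request`/`_streamResponseChunks` pair; `_call` now either reads the JSON result or concatenates streamed GenerationChunks. A minimal usage sketch, with placeholder credentials and a placeholder model id:

import { CloudflareWorkersAI } from "langchain/llms/cloudflare_workersai";

const model = new CloudflareWorkersAI({
  model: "@cf/meta/llama-2-7b-chat-int8", // placeholder model id
  cloudflareAccountId: process.env.CLOUDFLARE_ACCOUNT_ID,
  cloudflareApiToken: process.env.CLOUDFLARE_API_TOKEN,
  streaming: true, // new option in this release; defaults to false
});

// With streaming enabled, _call() drains _streamResponseChunks() and returns the
// concatenated text, emitting handleLLMNewToken callbacks for each chunk on the way.
const text = await model.call("Tell me a joke.");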
package/dist/llms/cloudflare_workersai.d.ts
@@ -1,4 +1,6 @@
  import { LLM, BaseLLMParams } from "./base.js";
+ import { CallbackManagerForLLMRun } from "../callbacks/manager.js";
+ import { GenerationChunk } from "../schema/index.js";
  /**
  * Interface for CloudflareWorkersAI input parameters.
  */
@@ -7,6 +9,7 @@ export interface CloudflareWorkersAIInput {
  cloudflareApiToken?: string;
  model?: string;
  baseUrl?: string;
+ streaming?: boolean;
  }
  /**
  * Class representing the CloudflareWorkersAI language model. It extends the LLM (Large
@@ -18,6 +21,7 @@ export declare class CloudflareWorkersAI extends LLM implements CloudflareWorker
  cloudflareAccountId?: string;
  cloudflareApiToken?: string;
  baseUrl: string;
+ streaming: boolean;
  static lc_name(): string;
  lc_serializable: boolean;
  constructor(fields?: CloudflareWorkersAIInput & BaseLLMParams);
@@ -37,13 +41,15 @@ export declare class CloudflareWorkersAI extends LLM implements CloudflareWorker
  };
  /** Get the type of LLM. */
  _llmType(): string;
+ _request(prompt: string, options: this["ParsedCallOptions"], stream?: boolean): Promise<Response>;
+ _streamResponseChunks(prompt: string, options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<GenerationChunk>;
  /** Call out to CloudflareWorkersAI's complete endpoint.
  Args:
  prompt: The prompt to pass into the model.
- Returns:
- The string generated by the model.
+ Returns:
+ The string generated by the model.
  Example:
  let response = CloudflareWorkersAI.call("Tell me a joke.");
  */
- _call(prompt: string, options: this["ParsedCallOptions"]): Promise<string>;
+ _call(prompt: string, options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<string>;
  }
package/dist/llms/cloudflare_workersai.js
@@ -1,5 +1,7 @@
  import { LLM } from "./base.js";
  import { getEnvironmentVariable } from "../util/env.js";
+ import { GenerationChunk } from "../schema/index.js";
+ import { convertEventStreamToIterableReadableDataStream } from "../util/event-source-parse.js";
  /**
  * Class representing the CloudflareWorkersAI language model. It extends the LLM (Large
  * Language Model) class, providing a standard interface for interacting
@@ -35,6 +37,12 @@ export class CloudflareWorkersAI extends LLM {
  writable: true,
  value: void 0
  });
+ Object.defineProperty(this, "streaming", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: false
+ });
  Object.defineProperty(this, "lc_serializable", {
  enumerable: true,
  configurable: true,
@@ -42,6 +50,7 @@ export class CloudflareWorkersAI extends LLM {
  value: true
  });
  this.model = fields?.model ?? this.model;
+ this.streaming = fields?.streaming ?? this.streaming;
  this.cloudflareAccountId =
  fields?.cloudflareAccountId ??
  getEnvironmentVariable("CLOUDFLARE_ACCOUNT_ID");
@@ -84,23 +93,15 @@ export class CloudflareWorkersAI extends LLM {
  _llmType() {
  return "cloudflare";
  }
- /** Call out to CloudflareWorkersAI's complete endpoint.
- Args:
- prompt: The prompt to pass into the model.
- Returns:
- The string generated by the model.
- Example:
- let response = CloudflareWorkersAI.call("Tell me a joke.");
- */
- async _call(prompt, options) {
+ async _request(prompt, options, stream) {
  this.validateEnvironment();
  const url = `${this.baseUrl}/${this.model}`;
  const headers = {
  Authorization: `Bearer ${this.cloudflareApiToken}`,
  "Content-Type": "application/json",
  };
- const data = { prompt };
- const responseData = await this.caller.call(async () => {
+ const data = { prompt, stream };
+ return this.caller.call(async () => {
  const response = await fetch(url, {
  method: "POST",
  headers,
@@ -113,8 +114,53 @@ export class CloudflareWorkersAI extends LLM {
  error.response = response;
  throw error;
  }
- return response.json();
+ return response;
  });
- return responseData.result.response;
+ }
+ async *_streamResponseChunks(prompt, options, runManager) {
+ const response = await this._request(prompt, options, true);
+ if (!response.body) {
+ throw new Error("Empty response from Cloudflare. Please try again.");
+ }
+ const stream = convertEventStreamToIterableReadableDataStream(response.body);
+ for await (const chunk of stream) {
+ if (chunk !== "[DONE]") {
+ const parsedChunk = JSON.parse(chunk);
+ const generationChunk = new GenerationChunk({
+ text: parsedChunk.response,
+ });
+ yield generationChunk;
+ // eslint-disable-next-line no-void
+ void runManager?.handleLLMNewToken(generationChunk.text ?? "");
+ }
+ }
+ }
+ /** Call out to CloudflareWorkersAI's complete endpoint.
+ Args:
+ prompt: The prompt to pass into the model.
+ Returns:
+ The string generated by the model.
+ Example:
+ let response = CloudflareWorkersAI.call("Tell me a joke.");
+ */
+ async _call(prompt, options, runManager) {
+ if (!this.streaming) {
+ const response = await this._request(prompt, options);
+ const responseData = await response.json();
+ return responseData.result.response;
+ }
+ else {
+ const stream = this._streamResponseChunks(prompt, options, runManager);
+ let finalResult;
+ for await (const chunk of stream) {
+ if (finalResult === undefined) {
+ finalResult = chunk;
+ }
+ else {
+ finalResult = finalResult.concat(chunk);
+ }
+ }
+ return finalResult?.text ?? "";
+ }
  }
  }
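
Because `_streamResponseChunks` is now implemented, the inherited `stream()` method should also yield tokens incrementally, independent of the `streaming` flag. A self-contained sketch of per-token consumption, again with a placeholder model id:

import { CloudflareWorkersAI } from "langchain/llms/cloudflare_workersai";

const streamingModel = new CloudflareWorkersAI({
  model: "@cf/meta/llama-2-7b-chat-int8", // placeholder model id
});

const stream = await streamingModel.stream("Write a haiku about edge inference.");
for await (const chunk of stream) {
  // Each chunk corresponds to the `response` field of one server-sent event.
  process.stdout.write(chunk);
}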
package/dist/load/import_constants.cjs
@@ -114,6 +114,8 @@ exports.optionalImportEntrypoints = [
  "langchain/chat_models/googlevertexai",
  "langchain/chat_models/googlevertexai/web",
  "langchain/chat_models/googlepalm",
+ "langchain/chat_models/iflytek_xinghuo",
+ "langchain/chat_models/iflytek_xinghuo/web",
  "langchain/chat_models/llama_cpp",
  "langchain/sql_db",
  "langchain/callbacks/handlers/llmonitor",
package/dist/load/import_constants.js
@@ -111,6 +111,8 @@ export const optionalImportEntrypoints = [
  "langchain/chat_models/googlevertexai",
  "langchain/chat_models/googlevertexai/web",
  "langchain/chat_models/googlepalm",
+ "langchain/chat_models/iflytek_xinghuo",
+ "langchain/chat_models/iflytek_xinghuo/web",
  "langchain/chat_models/llama_cpp",
  "langchain/sql_db",
  "langchain/callbacks/handlers/llmonitor",
package/dist/prompts/chat.cjs
@@ -374,6 +374,14 @@ class ChatPromptTemplate extends BaseChatPromptTemplate {
  };
  return new ChatPromptTemplate(promptDict);
  }
+ /**
+ * Load prompt template from a template f-string
+ */
+ static fromTemplate(template) {
+ const prompt = prompt_js_1.PromptTemplate.fromTemplate(template);
+ const humanTemplate = new HumanMessagePromptTemplate({ prompt });
+ return this.fromMessages([humanTemplate]);
+ }
  /**
  * Create a chat model-specific prompt from individual chat messages
  * or message-like tuples.
package/dist/prompts/chat.d.ts
@@ -2,6 +2,7 @@ import { BaseCallbackConfig } from "../callbacks/manager.js";
  import { BaseMessage, BaseMessageLike, BasePromptValue, InputValues, PartialValues } from "../schema/index.js";
  import { Runnable } from "../schema/runnable/index.js";
  import { BasePromptTemplate, BasePromptTemplateInput, BaseStringPromptTemplate, TypedPromptInputValues } from "./base.js";
+ import { type ParamsFromFString } from "./prompt.js";
  /**
  * Abstract class that serves as a base for creating message prompt
  * templates. It defines how to format messages for different roles in a
@@ -170,6 +171,10 @@ export declare class ChatPromptTemplate<RunInput extends InputValues = any, Part
  _getPromptType(): "chat";
  formatMessages(values: TypedPromptInputValues<RunInput>): Promise<BaseMessage[]>;
  partial<NewPartialVariableName extends string>(values: PartialValues<NewPartialVariableName>): Promise<ChatPromptTemplate<InputValues<Exclude<Extract<keyof RunInput, string>, NewPartialVariableName>>, any>>;
+ /**
+ * Load prompt template from a template f-string
+ */
+ static fromTemplate<RunInput extends InputValues = Symbol, T extends string = string>(template: T): ChatPromptTemplate<RunInput extends Symbol ? ParamsFromFString<T> : RunInput, any>;
  /**
  * Create a chat model-specific prompt from individual chat messages
  * or message-like tuples.
package/dist/prompts/chat.js
@@ -362,6 +362,14 @@ export class ChatPromptTemplate extends BaseChatPromptTemplate {
  };
  return new ChatPromptTemplate(promptDict);
  }
+ /**
+ * Load prompt template from a template f-string
+ */
+ static fromTemplate(template) {
+ const prompt = PromptTemplate.fromTemplate(template);
+ const humanTemplate = new HumanMessagePromptTemplate({ prompt });
+ return this.fromMessages([humanTemplate]);
+ }
  /**
  * Create a chat model-specific prompt from individual chat messages
  * or message-like tuples.
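
The new `ChatPromptTemplate.fromTemplate` wraps a single f-string template in a `HumanMessagePromptTemplate` and delegates to `fromMessages`, so a one-message chat prompt no longer needs the two-step construction. A small usage sketch:

import { ChatPromptTemplate } from "langchain/prompts";

// Equivalent to fromMessages([HumanMessagePromptTemplate.fromTemplate(...)]).
const prompt = ChatPromptTemplate.fromTemplate("Tell me a {adjective} joke about {topic}.");
const messages = await prompt.formatMessages({ adjective: "bad", topic: "compilers" });
// `messages` holds a single HumanMessage with both variables substituted; the input
// variable names are inferred from the template string via ParamsFromFString.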
package/dist/util/event-source-parse.cjs
@@ -1,6 +1,7 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.getMessages = exports.getLines = exports.getBytes = exports.EventStreamContentType = void 0;
+ exports.convertEventStreamToIterableReadableDataStream = exports.getMessages = exports.getLines = exports.getBytes = exports.EventStreamContentType = void 0;
+ const stream_js_1 = require("./stream.cjs");
  exports.EventStreamContentType = "text/event-stream";
  function isNodeJSReadable(x) {
  return x != null && typeof x === "object" && "on" in x;
@@ -199,6 +200,24 @@ function newMessage() {
  retry: undefined,
  };
  }
+ function convertEventStreamToIterableReadableDataStream(stream) {
+ const dataStream = new ReadableStream({
+ async start(controller) {
+ const enqueueLine = getMessages((msg) => {
+ if (msg.data)
+ controller.enqueue(msg.data);
+ });
+ const onLine = (line, fieldLength, flush) => {
+ enqueueLine(line, fieldLength, flush);
+ if (flush)
+ controller.close();
+ };
+ await getBytes(stream, getLines(onLine));
+ },
+ });
+ return stream_js_1.IterableReadableStream.fromReadableStream(dataStream);
+ }
+ exports.convertEventStreamToIterableReadableDataStream = convertEventStreamToIterableReadableDataStream;
  function isEmpty(message) {
  return (message.data === "" &&
  message.event === "" &&
package/dist/util/event-source-parse.d.ts
@@ -1,3 +1,4 @@
+ import { IterableReadableStream } from "./stream.js";
  export declare const EventStreamContentType = "text/event-stream";
  /**
  * Represents a message sent in an event stream
@@ -35,3 +36,4 @@ export declare function getLines(onLine: (line: Uint8Array, fieldLength: number,
  * @returns A function that should be called for each incoming line buffer.
  */
  export declare function getMessages(onMessage?: (msg: EventSourceMessage) => void, onId?: (id: string) => void, onRetry?: (retry: number) => void): (line: Uint8Array, fieldLength: number, flush?: boolean) => void;
+ export declare function convertEventStreamToIterableReadableDataStream(stream: ReadableStream): IterableReadableStream<any>;
package/dist/util/event-source-parse.js
@@ -1,3 +1,4 @@
+ import { IterableReadableStream } from "./stream.js";
  export const EventStreamContentType = "text/event-stream";
  function isNodeJSReadable(x) {
  return x != null && typeof x === "object" && "on" in x;
@@ -193,6 +194,23 @@ function newMessage() {
  retry: undefined,
  };
  }
+ export function convertEventStreamToIterableReadableDataStream(stream) {
+ const dataStream = new ReadableStream({
+ async start(controller) {
+ const enqueueLine = getMessages((msg) => {
+ if (msg.data)
+ controller.enqueue(msg.data);
+ });
+ const onLine = (line, fieldLength, flush) => {
+ enqueueLine(line, fieldLength, flush);
+ if (flush)
+ controller.close();
+ };
+ await getBytes(stream, getLines(onLine));
+ },
+ });
+ return IterableReadableStream.fromReadableStream(dataStream);
+ }
  function isEmpty(message) {
  return (message.data === "" &&
  message.event === "" &&
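
The new `convertEventStreamToIterableReadableDataStream` helper feeds an SSE byte stream through the existing getBytes/getLines/getMessages parsers and exposes only the `data:` payloads as an async-iterable stream; it is the helper the Cloudflare LLM uses above. A sketch of its contract against an arbitrary text/event-stream response; this is an internal utility, so the deep dist import path and URL below are for illustration only:

import { convertEventStreamToIterableReadableDataStream } from "langchain/dist/util/event-source-parse.js";

const response = await fetch("https://example.com/sse-endpoint"); // placeholder URL
if (!response.body) throw new Error("No response body");

const dataStream = convertEventStreamToIterableReadableDataStream(response.body);
for await (const data of dataStream) {
  if (data === "[DONE]") break; // Cloudflare ends its stream with this sentinel
  console.log(JSON.parse(data)); // each item is one event's `data:` payload
}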
package/dist/util/iflytek_websocket_stream.cjs
@@ -0,0 +1,81 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.BaseWebSocketStream = void 0;
+ /**
+ * [WebSocket](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket) with [Streams API](https://developer.mozilla.org/en-US/docs/Web/API/Streams_API)
+ *
+ * @see https://web.dev/websocketstream/
+ */
+ class BaseWebSocketStream {
+ constructor(url, options = {}) {
+ Object.defineProperty(this, "url", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "connection", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "closed", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "close", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ if (options.signal?.aborted) {
+ throw new DOMException("This operation was aborted", "AbortError");
+ }
+ this.url = url;
+ const ws = this.openWebSocket(url, options);
+ const closeWithInfo = ({ code, reason } = {}) => ws.close(code, reason);
+ this.connection = new Promise((resolve, reject) => {
+ ws.onopen = () => {
+ resolve({
+ readable: new ReadableStream({
+ start(controller) {
+ ws.onmessage = ({ data }) => controller.enqueue(data);
+ ws.onerror = (e) => controller.error(e);
+ },
+ cancel: closeWithInfo,
+ }),
+ writable: new WritableStream({
+ write(chunk) {
+ ws.send(chunk);
+ },
+ abort() {
+ ws.close();
+ },
+ close: closeWithInfo,
+ }),
+ protocol: ws.protocol,
+ extensions: ws.extensions,
+ });
+ ws.removeEventListener("error", reject);
+ };
+ ws.addEventListener("error", reject);
+ });
+ this.closed = new Promise((resolve, reject) => {
+ ws.onclose = ({ code, reason }) => {
+ resolve({ code, reason });
+ ws.removeEventListener("error", reject);
+ };
+ ws.addEventListener("error", reject);
+ });
+ if (options.signal) {
+ // eslint-disable-next-line no-param-reassign
+ options.signal.onabort = () => ws.close();
+ }
+ this.close = closeWithInfo;
+ }
+ }
+ exports.BaseWebSocketStream = BaseWebSocketStream;
package/dist/util/iflytek_websocket_stream.d.ts
@@ -0,0 +1,27 @@
+ export interface WebSocketConnection<T extends Uint8Array | string = Uint8Array | string> {
+ readable: ReadableStream<T>;
+ writable: WritableStream<T>;
+ protocol: string;
+ extensions: string;
+ }
+ export interface WebSocketCloseInfo {
+ code?: number;
+ reason?: string;
+ }
+ export interface WebSocketStreamOptions {
+ protocols?: string[];
+ signal?: AbortSignal;
+ }
+ /**
+ * [WebSocket](https://developer.mozilla.org/en-US/docs/Web/API/WebSocket) with [Streams API](https://developer.mozilla.org/en-US/docs/Web/API/Streams_API)
+ *
+ * @see https://web.dev/websocketstream/
+ */
+ export declare abstract class BaseWebSocketStream<T extends Uint8Array | string = Uint8Array | string> {
+ readonly url: string;
+ readonly connection: Promise<WebSocketConnection<T>>;
+ readonly closed: Promise<WebSocketCloseInfo>;
+ readonly close: (closeInfo?: WebSocketCloseInfo) => void;
+ constructor(url: string, options?: WebSocketStreamOptions);
+ abstract openWebSocket(url: string, options: WebSocketStreamOptions): WebSocket;
+ }
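
The new `BaseWebSocketStream` adapts a WebSocket to the Streams API (modeled on the WebSocketStream proposal) and leaves only `openWebSocket` abstract, so the Node and web variants of the Xinghuo client can each supply their own socket constructor. A minimal sketch of a concrete subclass for a runtime with a global WebSocket; the subclass name, URL, and deep dist import path are illustrative only:

import { BaseWebSocketStream, WebSocketStreamOptions } from "langchain/dist/util/iflytek_websocket_stream.js";

class BrowserWebSocketStream extends BaseWebSocketStream<string> {
  openWebSocket(url: string, options: WebSocketStreamOptions): WebSocket {
    return new WebSocket(url, options.protocols ?? []);
  }
}

// `connection` resolves once the socket opens, exposing Streams API handles.
const socket = new BrowserWebSocketStream("wss://example.com/socket", {}); // placeholder URL
const { readable, writable } = await socket.connection;
const writer = writable.getWriter();
await writer.write("hello"); // `readable` can likewise be consumed with getReader()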