@forge/llm 0.2.3-next.0-experimental-0c74a4b → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -2
- package/README.md +69 -2
- package/out/errors.d.ts +2 -0
- package/out/errors.d.ts.map +1 -1
- package/out/errors.js +4 -1
- package/out/index.d.ts +3 -2
- package/out/index.d.ts.map +1 -1
- package/out/index.js +4 -4
- package/out/interfaces/{llm-api.d.ts → internal.d.ts} +10 -2
- package/out/interfaces/internal.d.ts.map +1 -0
- package/out/interfaces/types.d.ts +3 -1
- package/out/interfaces/types.d.ts.map +1 -1
- package/out/llm-api.d.ts +4 -6
- package/out/llm-api.d.ts.map +1 -1
- package/out/llm-api.js +22 -13
- package/out/llm-client.d.ts +10 -0
- package/out/llm-client.d.ts.map +1 -0
- package/out/llm-client.js +29 -0
- package/out/response-mapper.d.ts +3 -1
- package/out/response-mapper.d.ts.map +1 -1
- package/out/response-mapper.js +17 -3
- package/out/streaming/llm-stream-parser.d.ts +11 -0
- package/out/streaming/llm-stream-parser.d.ts.map +1 -0
- package/out/streaming/llm-stream-parser.js +79 -0
- package/out/streaming/stream-response-wrapper.d.ts +18 -0
- package/out/streaming/stream-response-wrapper.d.ts.map +1 -0
- package/out/streaming/stream-response-wrapper.js +75 -0
- package/out/utils/error-handling.d.ts +1 -2
- package/out/utils/error-handling.d.ts.map +1 -1
- package/package.json +2 -2
- package/out/interfaces/llm-api.d.ts.map +0 -1
- /package/out/interfaces/{llm-api.js → internal.js} +0 -0
package/CHANGELOG.md
CHANGED
@@ -1,10 +1,21 @@
 # @forge/llm
 
-## 0.
+## 0.3.0
+
+### Minor Changes
+
+- 14c7c1e: Add Forge LLM SDK streaming
 
 ### Patch Changes
 
--
+- Updated dependencies [16e7d61]
+  - @forge/api@6.4.2
+
+## 0.3.0-next.1
+
+### Minor Changes
+
+- 14c7c1e: Add Forge LLM SDK streaming
 
 ## 0.2.3-next.0
 
package/README.md
CHANGED
@@ -9,7 +9,9 @@ Library for Forge LLM
 
 For the list of supported models visit [LLM Models](https://go.atlassian.com/forge-llms-api-reference/#supported-models)
 
-
+The SDK supports a `chat` and a `stream` interface.
+
+### Chat
 
 ```typescript
 import { chat } from '@forge/llm';
@@ -62,7 +64,7 @@
 const response = await chat(prompt);
 ```
 
-
+#### Response Structure
 The response is a `ChatResponse` object:
 
 ```json
@@ -74,6 +76,7 @@ The response is a `ChatResponse` object:
   },
   "choices": [
     {
+      "index": 0,
       "finish_reason": "tool_use",
       "message": {
         "role": "assistant",
@@ -102,5 +105,69 @@ The response is a `ChatResponse` object:
 }
 ```
 
+### Stream
+
+```typescript
+import { stream } from '@forge/llm';
+
+const prompt = {
+  model: 'claude-sonnet-4-20250514',
+  messages: [
+    {
+      role: 'user',
+      content: [
+        { type: 'text', text: 'Write a long poem about the sun.'}
+      ]
+    },
+    {
+      role: 'system',
+      content: [
+        { type: 'text', text: 'You are a helpful assistant.' }
+      ]
+    }
+  ],
+  temperature: 0.7,
+  max_completion_tokens: 1000,
+  top_p: 0.9,
+  tools: [],
+  tool_choice: 'auto'
+};
+
+
+const streamResponse = await stream(prompt);
+
+for await (const chunk of streamResponse) {
+  // consume each chunk as it arrives
+}
+
+streamResponse.close()
+
+```
+
+#### Response Structure
+The response is a `StreamResponse` async iterable object that yields `StreamResponse` objects:
+```json
+{
+  "usage": {
+    "input_tokens": 150,
+    "output_tokens": 50,
+    "total_tokens": 200
+  },
+  "choices": [
+    {
+      "index": 0,
+      "message": {
+        "role": "assistant",
+        "content": [
+          {
+            "type": "text",
+            "text": "The sun rises in the east, "
+          }
+        ]
+      }
+    }
+  ]
+}
+```
 ### Errors
 The SDK throws validation and Forge Errors.
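The Stream example above iterates the chunks without showing how to assemble them. The sketch below is not part of the published README; it assumes each yielded chunk follows the `ChatResponse`-like shape documented above and simply concatenates the text parts.

```typescript
// Sketch only – not from the published README. Assumes each streamed chunk
// matches the ChatResponse-like shape shown in the Stream example above.
import { stream } from '@forge/llm';

async function collectText(prompt: Parameters<typeof stream>[0]): Promise<string> {
  const streamResponse = await stream(prompt);
  let text = '';
  for await (const chunk of streamResponse) {
    for (const choice of chunk.choices ?? []) {
      const content = choice.message?.content;
      if (typeof content === 'string') {
        text += content;
      } else {
        for (const part of content ?? []) {
          if (part.type === 'text') {
            text += part.text;
          }
        }
      }
    }
  }
  await streamResponse.close();
  return text;
}
```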
package/out/errors.d.ts
CHANGED
@@ -25,4 +25,6 @@ export declare class ForgeLlmAPIError extends ForgeLlmError {
     context: Record<string, unknown>;
     constructor(responseDetails: APIErrorResponseDetails, forgeError: ForgeError);
 }
+export declare class StreamResponseError extends ForgeLlmError {
+}
 //# sourceMappingURL=errors.d.ts.map
package/out/errors.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"errors.d.ts","sourceRoot":"","sources":["../src/errors.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,UAAU;;;CAGb,CAAC;AAEX,MAAM,WAAW,UAAU;IACzB,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CACnC;AAED,qBAAa,aAAc,SAAQ,KAAK;gBAC1B,OAAO,EAAE,MAAM;CAI5B;AAED,qBAAa,qBAAsB,SAAQ,aAAa;gBAC1C,OAAO,EAAE,MAAM;CAI5B;AAED,MAAM,WAAW,uBAAuB;IACtC,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACzB;AAED,qBAAa,gBAAiB,SAAQ,aAAa;IACjD,eAAe,EAAE,uBAAuB,CAAC;IACzC,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAErB,eAAe,EAAE,uBAAuB,EAAE,UAAU,EAAE,UAAU;CAU7E"}
+{"version":3,"file":"errors.d.ts","sourceRoot":"","sources":["../src/errors.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,UAAU;;;CAGb,CAAC;AAEX,MAAM,WAAW,UAAU;IACzB,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CACnC;AAED,qBAAa,aAAc,SAAQ,KAAK;gBAC1B,OAAO,EAAE,MAAM;CAI5B;AAED,qBAAa,qBAAsB,SAAQ,aAAa;gBAC1C,OAAO,EAAE,MAAM;CAI5B;AAED,MAAM,WAAW,uBAAuB;IACtC,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;IACnB,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CAAC;CACzB;AAED,qBAAa,gBAAiB,SAAQ,aAAa;IACjD,eAAe,EAAE,uBAAuB,CAAC;IACzC,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;gBAErB,eAAe,EAAE,uBAAuB,EAAE,UAAU,EAAE,UAAU;CAU7E;AAED,qBAAa,mBAAoB,SAAQ,aAAa;CAAG"}
package/out/errors.js
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.ForgeLlmAPIError = exports.PromptValidationError = exports.ForgeLlmError = exports.errorCodes = void 0;
+exports.StreamResponseError = exports.ForgeLlmAPIError = exports.PromptValidationError = exports.ForgeLlmError = exports.errorCodes = void 0;
 exports.errorCodes = {
     FORGE_LLM_API_ERROR: 'FORGE_API_ERROR',
     UNKNOWN_ERROR: 'UNKNOWN_ERROR'
@@ -35,3 +35,6 @@ class ForgeLlmAPIError extends ForgeLlmError {
     }
 }
 exports.ForgeLlmAPIError = ForgeLlmAPIError;
+class StreamResponseError extends ForgeLlmError {
+}
+exports.StreamResponseError = StreamResponseError;
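Because the new `StreamResponseError` extends `ForgeLlmError`, callers who already catch the exported base class also cover failures raised while reading the stream. A minimal sketch using only the exports visible in this diff:

```typescript
// Minimal sketch: StreamResponseError extends ForgeLlmError, so catching the
// exported base class also covers errors thrown while reading the stream.
import { stream, ForgeLlmError } from '@forge/llm';

async function readStream(prompt: Parameters<typeof stream>[0]) {
  const streamResponse = await stream(prompt);
  try {
    for await (const chunk of streamResponse) {
      // handle each chunk here
    }
  } catch (e) {
    if (e instanceof ForgeLlmError) {
      // read/parse failures surface as StreamResponseError, a ForgeLlmError subclass
      return;
    }
    throw e;
  } finally {
    await streamResponse.close();
  }
}
```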
package/out/index.d.ts
CHANGED
@@ -1,4 +1,5 @@
 export * from './interfaces/types';
-
-
+declare const chat: (prompt: import("./interfaces/types").Prompt) => Promise<import("./interfaces/internal").LlmResponse>;
+declare const stream: (prompt: import("./interfaces/types").Prompt) => Promise<import("./streaming/stream-response-wrapper").StreamResponse>;
+export { chat, stream };
 //# sourceMappingURL=index.d.ts.map
package/out/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAEA,cAAc,oBAAoB,CAAC;AAInC,QAAA,MAAM,IAAI,uGAAoE,CAAC;AAE/E,QAAA,MAAM,MAAM,wHAAwE,CAAC;AAErF,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC"}
package/out/index.js
CHANGED
@@ -1,11 +1,11 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.chat = void 0;
+exports.stream = exports.chat = void 0;
 const tslib_1 = require("tslib");
 const llm_api_1 = require("./llm-api");
-const fetch_wrapper_1 = require("./fetch-wrapper");
 tslib_1.__exportStar(require("./interfaces/types"), exports);
-const llmApi = new llm_api_1.LlmApiImpl(
+const llmApi = new llm_api_1.LlmApiImpl();
 const chat = (...args) => llmApi.chat(...args);
 exports.chat = chat;
-
+const stream = (...args) => llmApi.stream(...args);
+exports.stream = stream;
package/out/interfaces/{llm-api.d.ts → internal.d.ts}
RENAMED
@@ -1,6 +1,14 @@
-import { ChatResponse, Prompt } from './types';
+import { ChatResponse, Prompt, StreamResponse } from './types';
 export interface LlmApi {
     chat(prompt: Prompt): Promise<ChatResponse>;
+    stream(prompt: Prompt): Promise<StreamResponse>;
+}
+export interface ClientArgs {
+    model: string;
+    body: Record<string, unknown>;
+}
+export interface LlmClient {
+    post(args: ClientArgs): Promise<Response>;
 }
 export interface LlmRequest {
     messages: Message[];
@@ -73,4 +81,4 @@ export interface TextPart {
     type: 'text';
     text: string;
 }
-//# sourceMappingURL=
+//# sourceMappingURL=internal.d.ts.map
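The renamed `internal.d.ts` introduces a `LlmClient` seam (with `ClientArgs`) between `LlmApiImpl` and the Forge fetch runtime. As an illustration only, any object with a matching `post` method satisfies it; the deep import path below is an assumption, not documented public API.

```typescript
// Illustration only: a stub satisfying the internal LlmClient interface.
// The deep import path is an assumption, not documented public API.
import type { ClientArgs, LlmClient } from '@forge/llm/out/interfaces/internal';

const stubClient: LlmClient = {
  async post({ model, body }: ClientArgs): Promise<Response> {
    // Echo a well-formed, empty chat payload instead of calling the runtime.
    return new Response(JSON.stringify({ model, requested: body, choices: [] }), {
      status: 200,
      headers: { 'Content-Type': 'application/json' }
    });
  }
};
```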
package/out/interfaces/internal.d.ts.map
ADDED
@@ -0,0 +1 @@
{"version":3,"file":"internal.d.ts","sourceRoot":"","sources":["../../src/interfaces/internal.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,EAAE,cAAc,EAAE,MAAM,SAAS,CAAC;AAE/D,MAAM,WAAW,MAAM;IACrB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,YAAY,CAAC,CAAC;IAC5C,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,cAAc,CAAC,CAAC;CACjD;AAED,MAAM,WAAW,UAAU;IACzB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;CAC/B;AAED,MAAM,WAAW,SAAS;IACxB,IAAI,CAAC,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC;CAC3C;AAED,MAAM,WAAW,UAAU;IACzB,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,qBAAqB,CAAC,EAAE,MAAM,CAAC;IAC/B,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,KAAK,CAAC,EAAE,IAAI,EAAE,CAAC;IACf,WAAW,CAAC,EAAE,UAAU,CAAC;CAC1B;AAED,oBAAY,OAAO,GAAG,aAAa,GAAG,WAAW,GAAG,gBAAgB,GAAG,WAAW,CAAC;AAEnF,MAAM,WAAW,IAAI;IACnB,IAAI,EAAE,UAAU,CAAC;IACjB,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,EAAE,MAAM,CAAC;QACpB,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KACrC,CAAC;CACH;AAED,oBAAY,UAAU,GAClB,MAAM,GACN,MAAM,GACN,UAAU,GACV;IACE,IAAI,EAAE,UAAU,CAAC;IACjB,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAC;KACd,CAAC;CACH,CAAC;AAEN,MAAM,WAAW,aAAa;IAC5B,OAAO,EAAE,OAAO,CAAC;IACjB,IAAI,EAAE,QAAQ,CAAC;CAChB;AAED,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,OAAO,CAAC;IACjB,IAAI,EAAE,MAAM,CAAC;CACd;AACD,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,OAAO,CAAC;IACjB,IAAI,EAAE,MAAM,CAAC;IACb,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,gBAAgB;IAC/B,OAAO,EAAE,OAAO,CAAC;IACjB,IAAI,EAAE,WAAW,CAAC;IAClB,UAAU,CAAC,EAAE,QAAQ,EAAE,CAAC;CACzB;AAED,MAAM,WAAW,QAAQ;IACvB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,UAAU,CAAC;IACjB,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAC;QACb,SAAS,EAAE,MAAM,CAAC;KACnB,CAAC;CACH;AAED,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,CAAC,EAAE,KAAK,CAAC;CACf;AAED,MAAM,WAAW,MAAM;IACrB,aAAa,EAAE,MAAM,CAAC;IACtB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,gBAAgB,CAAC;CAC3B;AAED,MAAM,WAAW,KAAK;IACpB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAED,oBAAY,OAAO,GAAG,MAAM,GAAG,WAAW,EAAE,CAAC;AAE7C,oBAAY,WAAW,GAAG,QAAQ,CAAC;AAEnC,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;CACd"}
package/out/interfaces/types.d.ts
CHANGED
@@ -1,9 +1,11 @@
 import { ForgeLlmError, PromptValidationError } from '../errors';
-import {
+import { StreamResponse } from '../streaming/stream-response-wrapper';
+import { LlmRequest, LlmResponse, Message, Tool, ToolCall, Usage } from './internal';
 export declare type Prompt = LlmRequest & {
     model: string;
 };
 export declare type ChatResponse = LlmResponse;
+export type { StreamResponse };
 export declare type ToolSchema = Tool;
 export type { Message, ToolCall, Usage };
 export { ForgeLlmError, PromptValidationError };
package/out/interfaces/types.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/interfaces/types.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,aAAa,EAAE,qBAAqB,EAAE,MAAM,WAAW,CAAC;AACjE,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,
+{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../src/interfaces/types.ts"],"names":[],"mappings":"AAIA,OAAO,EAAE,aAAa,EAAE,qBAAqB,EAAE,MAAM,WAAW,CAAC;AACjE,OAAO,EAAE,cAAc,EAAE,MAAM,sCAAsC,CAAC;AACtE,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,KAAK,EAAE,MAAM,YAAY,CAAC;AAErF,oBAAY,MAAM,GAAG,UAAU,GAAG;IAChC,KAAK,EAAE,MAAM,CAAC;CACf,CAAC;AAEF,oBAAY,YAAY,GAAG,WAAW,CAAC;AAEvC,YAAY,EAAE,cAAc,EAAE,CAAC;AAE/B,oBAAY,UAAU,GAAG,IAAI,CAAC;AAE9B,YAAY,EAAE,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,CAAC;AAEzC,OAAO,EAAE,aAAa,EAAE,qBAAqB,EAAE,CAAC"}
package/out/llm-api.d.ts
CHANGED
@@ -1,10 +1,8 @@
-import {
-import {
-import { LlmApi } from './interfaces/llm-api';
+import { ChatResponse, Prompt, StreamResponse } from './interfaces/types';
+import { LlmApi, LlmClient } from './interfaces/internal';
 export declare class LlmApiImpl implements LlmApi {
-
-    constructor(apiClient: FetchMethod);
+    llmClient: LlmClient;
     chat: (prompt: Prompt) => Promise<ChatResponse>;
-
+    stream: (prompt: Prompt) => Promise<StreamResponse>;
 }
 //# sourceMappingURL=llm-api.d.ts.map
package/out/llm-api.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"llm-api.d.ts","sourceRoot":"","sources":["../src/llm-api.ts"],"names":[],"mappings":"AAAA,OAAO,
+{"version":3,"file":"llm-api.d.ts","sourceRoot":"","sources":["../src/llm-api.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,EAAE,cAAc,EAAE,MAAM,oBAAoB,CAAC;AAE1E,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,uBAAuB,CAAC;AAK1D,qBAAa,UAAW,YAAW,MAAM;IACvC,SAAS,EAAE,SAAS,CAAuB;IAE3C,IAAI,WAAkB,MAAM,KAAG,QAAQ,YAAY,CAAC,CAgBlD;IAEF,MAAM,WAAkB,MAAM,KAAG,QAAQ,cAAc,CAAC,CAgBtD;CACH"}
package/out/llm-api.js
CHANGED
@@ -4,25 +4,34 @@ exports.LlmApiImpl = void 0;
 const validators_1 = require("./validators");
 const response_mapper_1 = require("./response-mapper");
 const error_handling_1 = require("./utils/error-handling");
+const llm_client_1 = require("./llm-client");
 class LlmApiImpl {
-
-    constructor(apiClient) {
-        this.apiClient = apiClient;
-    }
+    llmClient = new llm_client_1.LlmClientImpl();
     chat = async (prompt) => {
         (0, validators_1.validatePrompt)(prompt);
         const { model, ...request } = prompt;
-        const
-
-
-
+        const response = await this.llmClient.post({
+            model,
+            body: {
+                ...request,
+                stream: false
+            }
         });
         await (0, error_handling_1.checkResponseError)(response);
-        return await (0, response_mapper_1.
+        return await (0, response_mapper_1.mapForgeLLMBatchResponse)(response);
+    };
+    stream = async (prompt) => {
+        (0, validators_1.validatePrompt)(prompt);
+        const { model, ...request } = prompt;
+        const response = await this.llmClient.post({
+            model,
+            body: {
+                ...request,
+                stream: true
+            }
+        });
+        await (0, error_handling_1.checkResponseError)(response);
+        return (0, response_mapper_1.mapForgeLLMStreamResponse)(response);
     };
-    buildForgeLlmUrl(model) {
-        const baseUrl = 'https://llm';
-        return `${baseUrl}/${encodeURIComponent(model)}`;
-    }
 }
 exports.LlmApiImpl = LlmApiImpl;
package/out/llm-client.d.ts
ADDED
@@ -0,0 +1,10 @@
+import { ClientArgs, LlmClient } from './interfaces/internal';
+export declare class LlmClientImpl implements LlmClient {
+    tracing: {
+        traceId: string;
+        spanId: string;
+    };
+    post(args: ClientArgs): Promise<Response>;
+    private buildForgeLlmUrl;
+}
+//# sourceMappingURL=llm-client.d.ts.map
package/out/llm-client.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"llm-client.d.ts","sourceRoot":"","sources":["../src/llm-client.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,MAAM,uBAAuB,CAAC;AAE9D,qBAAa,aAAc,YAAW,SAAS;IAC7C,OAAO;;;MAA2B;IAE5B,IAAI,CAAC,IAAI,EAAE,UAAU,GAAG,OAAO,CAAC,QAAQ,CAAC;IAkB/C,OAAO,CAAC,gBAAgB;CAIzB"}
package/out/llm-client.js
ADDED
@@ -0,0 +1,29 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.LlmClientImpl = void 0;
+const api_1 = require("@forge/api");
+class LlmClientImpl {
+    tracing = (0, api_1.__getRuntime)()?.tracing;
+    async post(args) {
+        const { model, body } = args;
+        const { traceId, spanId } = this.tracing;
+        const path = this.buildForgeLlmUrl(model);
+        const options = {
+            method: 'POST',
+            body: JSON.stringify(body),
+            headers: {
+                'Content-Type': 'application/json',
+                'x-b3-traceid': traceId,
+                'x-b3-spanid': spanId
+            }
+        };
+        return await global.__forge_fetch__({ type: 'llm', model: model }, path, {
+            ...options
+        });
+    }
+    buildForgeLlmUrl(model) {
+        const baseUrl = 'https://llm';
+        return `${baseUrl}/${encodeURIComponent(model)}`;
+    }
+}
+exports.LlmClientImpl = LlmClientImpl;
package/out/response-mapper.d.ts
CHANGED
@@ -1,4 +1,6 @@
 import { APIResponse } from '@forge/api';
 import { ChatResponse } from './interfaces/types';
-
+import { StreamResponse } from './streaming/stream-response-wrapper';
+export declare const mapForgeLLMBatchResponse: (response: APIResponse) => Promise<ChatResponse>;
+export declare const mapForgeLLMStreamResponse: (response: Response) => StreamResponse;
 //# sourceMappingURL=response-mapper.d.ts.map
package/out/response-mapper.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"response-mapper.d.ts","sourceRoot":"","sources":["../src/response-mapper.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AACzC,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAElD,eAAO,MAAM,
+{"version":3,"file":"response-mapper.d.ts","sourceRoot":"","sources":["../src/response-mapper.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AACzC,OAAO,EAAE,YAAY,EAAE,MAAM,oBAAoB,CAAC;AAElD,OAAO,EAAyB,cAAc,EAAE,MAAM,qCAAqC,CAAC;AAE5F,eAAO,MAAM,wBAAwB,aAAoB,WAAW,KAAG,QAAQ,YAAY,CAG1F,CAAC;AAEF,eAAO,MAAM,yBAAyB,aAAc,QAAQ,KAAG,cAQ9D,CAAC"}
package/out/response-mapper.js
CHANGED
@@ -1,8 +1,22 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
-const
+exports.mapForgeLLMStreamResponse = exports.mapForgeLLMBatchResponse = void 0;
+const stream_response_wrapper_1 = require("./streaming/stream-response-wrapper");
+const mapForgeLLMBatchResponse = async (response) => {
     const chatResponse = await response.json();
     return chatResponse;
 };
-exports.
+exports.mapForgeLLMBatchResponse = mapForgeLLMBatchResponse;
+const mapForgeLLMStreamResponse = (response) => {
+    if (response.body) {
+        const webStream = response.body;
+        const responseWrapper = new stream_response_wrapper_1.StreamResponseWrapper(webStream);
+        return responseWrapper.toAsyncIterable();
+    }
+    else {
+        return new stream_response_wrapper_1.StreamResponse(emptyStream());
+    }
+};
+exports.mapForgeLLMStreamResponse = mapForgeLLMStreamResponse;
+async function* emptyStream() {
+}
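One edge case in the new mapper: when the upstream `Response` has no body, `mapForgeLLMStreamResponse` falls back to a `StreamResponse` built over an empty async generator. A sketch of that behaviour (the deep import path is assumed; the mapper is not re-exported from the package index):

```typescript
// Sketch of the no-body fallback (deep import path assumed; internal module).
import { mapForgeLLMStreamResponse } from '@forge/llm/out/response-mapper';

async function drainEmpty() {
  const empty = mapForgeLLMStreamResponse(new Response(null));
  for await (const chunk of empty) {
    // never reached: the fallback iterates an empty async generator
  }
  await empty.close(); // no wrapper behind it, so close() returns undefined
}
```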
package/out/streaming/llm-stream-parser.d.ts
ADDED
@@ -0,0 +1,11 @@
+import { LlmResponse } from '../interfaces/internal';
+export declare class LlmStreamParser {
+    SEPARATOR: Set<string>;
+    private fragments;
+    parse(chunk: Uint8Array): LlmResponse[];
+    flush(): LlmResponse[];
+    private collectSegment;
+    private safeJsonParse;
+    private splitConcatenatedJson;
+}
+//# sourceMappingURL=llm-stream-parser.d.ts.map
package/out/streaming/llm-stream-parser.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"llm-stream-parser.d.ts","sourceRoot":"","sources":["../../src/streaming/llm-stream-parser.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,MAAM,wBAAwB,CAAC;AAMrD,qBAAa,eAAe;IAC1B,SAAS,cAAmC;IAC5C,OAAO,CAAC,SAAS,CAAgB;IAKjC,KAAK,CAAC,KAAK,EAAE,UAAU,GAAG,WAAW,EAAE;IAsCvC,KAAK,IAAI,WAAW,EAAE;IAatB,OAAO,CAAC,cAAc;IAgBtB,OAAO,CAAC,aAAa;IAmBrB,OAAO,CAAC,qBAAqB;CAO9B"}
package/out/streaming/llm-stream-parser.js
ADDED
@@ -0,0 +1,79 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.LlmStreamParser = void 0;
+class LlmStreamParser {
+    SEPARATOR = new Set(['\n', '\n\n']);
+    fragments = [];
+    parse(chunk) {
+        const text = Buffer.from(chunk).toString();
+        if (!text) {
+            return [];
+        }
+        const out = [];
+        let start = 0;
+        let i = 0;
+        while (i < text.length) {
+            const char = text.charAt(i);
+            if (this.SEPARATOR.has(char)) {
+                this.collectSegment(text, start, i, out);
+                i += 1;
+                start = i;
+                continue;
+            }
+            i += 1;
+        }
+        if (start < text.length) {
+            const tail = text.slice(start);
+            if (this.fragments.length) {
+                this.fragments.push(tail);
+            }
+            else {
+                this.fragments = [tail];
+            }
+        }
+        return out;
+    }
+    flush() {
+        if (this.fragments.length === 0)
+            return [];
+        const line = this.fragments.join('');
+        this.fragments = [];
+        return line ? this.splitConcatenatedJson(line) : [];
+    }
+    collectSegment(text, start, end, out) {
+        const segment = text.slice(start, end);
+        if (this.fragments.length) {
+            const parsed = this.safeJsonParse(this.fragments.join('') + segment);
+            if (parsed) {
+                out.push(parsed);
+                this.fragments = [];
+            }
+        }
+        else {
+            const parsed = this.safeJsonParse(segment);
+            if (parsed) {
+                out.push(parsed);
+            }
+        }
+    }
+    safeJsonParse(input) {
+        let line = null;
+        try {
+            const trimmedInput = input.trim();
+            if (trimmedInput) {
+                line = JSON.parse(trimmedInput);
+            }
+        }
+        catch {
+        }
+        return line;
+    }
+    splitConcatenatedJson(input) {
+        const regex = /(?<=\})(?=\{)/;
+        return input
+            .split(regex)
+            .map((s) => this.safeJsonParse(s))
+            .filter((s) => s !== null);
+    }
+}
+exports.LlmStreamParser = LlmStreamParser;
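The parser added above reassembles JSON objects that arrive split across network chunks, buffering fragments until a newline separator completes them. A small illustration of that behaviour (internal class; the deep import path is an assumption):

```typescript
// Illustration of the chunk reassembly (internal class; deep import path assumed).
import { LlmStreamParser } from '@forge/llm/out/streaming/llm-stream-parser';

const parser = new LlmStreamParser();
const encoder = new TextEncoder();

const first = parser.parse(encoder.encode('{"choices":[{"ind'));
// first === [] – no newline yet, so the fragment is buffered internally
const second = parser.parse(encoder.encode('ex":0}]}\n'));
// second holds one parsed object: the two fragments joined and JSON.parse'd
const rest = parser.flush();
// rest === [] – nothing left over after the newline-terminated object
```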
package/out/streaming/stream-response-wrapper.d.ts
ADDED
@@ -0,0 +1,18 @@
+/// <reference types="node" />
+import { ChatResponse } from '../interfaces/types';
+import { ReadableStream } from 'stream/web';
+export declare class StreamResponseWrapper {
+    private readonly reader;
+    constructor(readableStream: ReadableStream<Uint8Array>);
+    private asyncIterableFromReadableStream;
+    close(): Promise<void>;
+    toAsyncIterable(): StreamResponse;
+}
+export declare class StreamResponse implements AsyncIterable<ChatResponse> {
+    private readonly iterator;
+    private readonly wrapper?;
+    constructor(iterator: AsyncIterable<ChatResponse>, wrapper?: StreamResponseWrapper | undefined);
+    [Symbol.asyncIterator](): AsyncIterator<ChatResponse>;
+    close(): Promise<void> | undefined;
+}
+//# sourceMappingURL=stream-response-wrapper.d.ts.map
package/out/streaming/stream-response-wrapper.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"stream-response-wrapper.d.ts","sourceRoot":"","sources":["../../src/streaming/stream-response-wrapper.ts"],"names":[],"mappings":";AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,qBAAqB,CAAC;AAEnD,OAAO,EAAE,cAAc,EAA+B,MAAM,YAAY,CAAC;AAGzE,qBAAa,qBAAqB;IAChC,OAAO,CAAC,QAAQ,CAAC,MAAM,CAA0C;gBAErD,cAAc,EAAE,cAAc,CAAC,UAAU,CAAC;IAItD,OAAO,CAAC,+BAA+B;IAsBjC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAI5B,eAAe,IAAI,cAAc;CAwBlC;AAED,qBAAa,cAAe,YAAW,aAAa,CAAC,YAAY,CAAC;IAE9D,OAAO,CAAC,QAAQ,CAAC,QAAQ;IACzB,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC;gBADR,QAAQ,EAAE,aAAa,CAAC,YAAY,CAAC,EACrC,OAAO,CAAC,mCAAuB;IAElD,CAAC,MAAM,CAAC,aAAa,CAAC,IAAI,aAAa,CAAC,YAAY,CAAC;IAIrD,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS;CAGnC"}
package/out/streaming/stream-response-wrapper.js
ADDED
@@ -0,0 +1,75 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.StreamResponse = exports.StreamResponseWrapper = void 0;
+const llm_stream_parser_1 = require("./llm-stream-parser");
+const errors_1 = require("../errors");
+class StreamResponseWrapper {
+    reader;
+    constructor(readableStream) {
+        this.reader = readableStream.getReader();
+    }
+    asyncIterableFromReadableStream() {
+        const reader = this.reader;
+        return {
+            [Symbol.asyncIterator]() {
+                return {
+                    async next() {
+                        try {
+                            const { done, value } = await reader.read();
+                            if (done) {
+                                return { done: true, value: undefined };
+                            }
+                            return { done: false, value };
+                        }
+                        catch (error) {
+                            reader.releaseLock();
+                            throw new errors_1.StreamResponseError(error);
+                        }
+                    }
+                };
+            }
+        };
+    }
+    async close() {
+        await this.reader.cancel();
+    }
+    toAsyncIterable() {
+        async function* mapToLlmResponse(source) {
+            const parser = new llm_stream_parser_1.LlmStreamParser();
+            try {
+                for await (const chunk of source) {
+                    const lines = parser.parse(chunk);
+                    for (const line of lines) {
+                        yield line;
+                    }
+                }
+                const remaining = parser.flush();
+                for (const line of remaining) {
+                    yield line;
+                }
+            }
+            catch (e) {
+                throw new errors_1.StreamResponseError(e);
+            }
+        }
+        const iterable = this.asyncIterableFromReadableStream();
+        const mappedIterable = mapToLlmResponse(iterable);
+        return new StreamResponse(mappedIterable, this);
+    }
+}
+exports.StreamResponseWrapper = StreamResponseWrapper;
+class StreamResponse {
+    iterator;
+    wrapper;
+    constructor(iterator, wrapper) {
+        this.iterator = iterator;
+        this.wrapper = wrapper;
+    }
+    [Symbol.asyncIterator]() {
+        return this.iterator[Symbol.asyncIterator]();
+    }
+    close() {
+        return this.wrapper?.close();
+    }
+}
+exports.StreamResponse = StreamResponse;
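The wrapper added above converts a web `ReadableStream` of UTF-8 JSON lines into the async-iterable `StreamResponse` that `stream()` returns, surfacing read failures as `StreamResponseError`. A self-contained sketch of that conversion (internal classes; the deep import path is an assumption):

```typescript
// Sketch only: feed the wrapper a hand-built ReadableStream of JSON lines.
// Internal classes; the deep import path is an assumption.
import { ReadableStream } from 'stream/web';
import { StreamResponseWrapper } from '@forge/llm/out/streaming/stream-response-wrapper';

async function demo() {
  const encoder = new TextEncoder();
  const source = new ReadableStream<Uint8Array>({
    start(controller) {
      controller.enqueue(encoder.encode('{"choices":[]}\n'));
      controller.enqueue(encoder.encode('{"choices":[]}\n'));
      controller.close();
    }
  });
  const response = new StreamResponseWrapper(source).toAsyncIterable();
  for await (const chunk of response) {
    // two parsed chunks arrive here, one per JSON line
  }
  await response.close(); // cancels the underlying reader
}
```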
package/out/utils/error-handling.d.ts
CHANGED
@@ -1,6 +1,5 @@
 import { ForgeError } from '../errors';
-import { APIResponse } from '@forge/api';
 export declare function isForgeError(body: unknown): body is ForgeError;
-export declare function checkResponseError(response:
+export declare function checkResponseError(response: Response): Promise<void>;
 export declare function safeGetParsedBody(text: string): unknown | undefined;
 //# sourceMappingURL=error-handling.d.ts.map
package/out/utils/error-handling.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"error-handling.d.ts","sourceRoot":"","sources":["../../src/utils/error-handling.ts"],"names":[],"mappings":"AAAA,OAAO,EAAuC,UAAU,EAAoB,MAAM,WAAW,CAAC;
+{"version":3,"file":"error-handling.d.ts","sourceRoot":"","sources":["../../src/utils/error-handling.ts"],"names":[],"mappings":"AAAA,OAAO,EAAuC,UAAU,EAAoB,MAAM,WAAW,CAAC;AAU9F,wBAAgB,YAAY,CAAC,IAAI,EAAE,OAAO,GAAG,IAAI,IAAI,UAAU,CAE9D;AAED,wBAAsB,kBAAkB,CAAC,QAAQ,EAAE,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC,CAuB1E;AAYD,wBAAgB,iBAAiB,CAAC,IAAI,EAAE,MAAM,GAAG,OAAO,GAAG,SAAS,CAMnE"}
package/package.json
CHANGED
@@ -1,13 +1,13 @@
 {
   "name": "@forge/llm",
-  "version": "0.
+  "version": "0.3.0",
   "description": "Forge LLM SDK",
   "main": "out/index.js",
   "types": "out/index.d.ts",
   "author": "Atlassian",
   "license": "SEE LICENSE IN LICENSE.txt",
   "dependencies": {
-    "@forge/api": "^6.4.2
+    "@forge/api": "^6.4.2"
   },
   "devDependencies": {
     "@types/node": "20.19.1",
package/out/interfaces/llm-api.d.ts.map
REMOVED
@@ -1 +0,0 @@
{"version":3,"file":"llm-api.d.ts","sourceRoot":"","sources":["../../src/interfaces/llm-api.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,EAAE,MAAM,SAAS,CAAC;AAE/C,MAAM,WAAW,MAAM;IACrB,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,YAAY,CAAC,CAAC;CAC7C;AAED,MAAM,WAAW,UAAU;IACzB,QAAQ,EAAE,OAAO,EAAE,CAAC;IACpB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,qBAAqB,CAAC,EAAE,MAAM,CAAC;IAC/B,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,KAAK,CAAC,EAAE,IAAI,EAAE,CAAC;IACf,WAAW,CAAC,EAAE,UAAU,CAAC;CAC1B;AAED,oBAAY,OAAO,GAAG,aAAa,GAAG,WAAW,GAAG,gBAAgB,GAAG,WAAW,CAAC;AAEnF,MAAM,WAAW,IAAI;IACnB,IAAI,EAAE,UAAU,CAAC;IACjB,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAC;QACb,WAAW,EAAE,MAAM,CAAC;QACpB,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;KACrC,CAAC;CACH;AAED,oBAAY,UAAU,GAClB,MAAM,GACN,MAAM,GACN,UAAU,GACV;IACE,IAAI,EAAE,UAAU,CAAC;IACjB,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAC;KACd,CAAC;CACH,CAAC;AAEN,MAAM,WAAW,aAAa;IAC5B,OAAO,EAAE,OAAO,CAAC;IACjB,IAAI,EAAE,QAAQ,CAAC;CAChB;AAED,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,OAAO,CAAC;IACjB,IAAI,EAAE,MAAM,CAAC;CACd;AACD,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,OAAO,CAAC;IACjB,IAAI,EAAE,MAAM,CAAC;IACb,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,gBAAgB;IAC/B,OAAO,EAAE,OAAO,CAAC;IACjB,IAAI,EAAE,WAAW,CAAC;IAClB,UAAU,CAAC,EAAE,QAAQ,EAAE,CAAC;CACzB;AAED,MAAM,WAAW,QAAQ;IACvB,EAAE,EAAE,MAAM,CAAC;IACX,IAAI,EAAE,UAAU,CAAC;IACjB,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE;QACR,IAAI,EAAE,MAAM,CAAC;QACb,SAAS,EAAE,MAAM,CAAC;KACnB,CAAC;CACH;AAED,MAAM,WAAW,WAAW;IAC1B,OAAO,EAAE,MAAM,EAAE,CAAC;IAClB,KAAK,CAAC,EAAE,KAAK,CAAC;CACf;AAED,MAAM,WAAW,MAAM;IACrB,aAAa,EAAE,MAAM,CAAC;IACtB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,gBAAgB,CAAC;CAC3B;AAED,MAAM,WAAW,KAAK;IACpB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAED,oBAAY,OAAO,GAAG,MAAM,GAAG,WAAW,EAAE,CAAC;AAE7C,oBAAY,WAAW,GAAG,QAAQ,CAAC;AAEnC,MAAM,WAAW,QAAQ;IACvB,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;CACd"}
package/out/interfaces/{llm-api.js → internal.js}
File without changes