@adobe/llm-apps-runtime 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,60 @@
1
+ Adobe Inc. ("Adobe")
2
+ Adobe Technology License Terms
3
+
4
+ ================================================================================
5
+
6
+ NOTICE TO USER: ADOBE LICENSES THE ADOBE TECHNOLOGY ONLY ON THE CONDITION
7
+ THAT YOU ACCEPT ALL OF THE TERMS CONTAINED OR REFERENCED IN THIS AGREEMENT.
8
+
9
+ ================================================================================
10
+
11
+ YOUR ACCESS TO AND USE OF THE ADOBE TECHNOLOGY IS GOVERNED BY THE ENTERPRISE
12
+ LICENSING TERMS PREVIOUSLY AGREED TO BY YOU AND ADOBE OR AN AUTHORIZED ADOBE
13
+ RESELLER IN A SEPARATE AGREEMENT. IF YOU HAVE NOT PREVIOUSLY AGREED TO
14
+ LICENSING TERMS THEN YOUR INSTALLATION AND USE OF THE ADOBE TECHNOLOGY IS
15
+ SUBJECT TO THE CURRENT APPLICABLE ADOBE LICENSING TERMS AVAILABLE AT
16
+ HTTP://WWW.ADOBE.COM/LEGAL/TERMS/ENTERPRISE-LICENSING.HTML. YOU AGREE THAT
17
+ THIS AGREEMENT WILL HAVE THE SAME EFFECT AS ANY WRITTEN NEGOTIATED AGREEMENT
18
+ SIGNED BY YOU.
19
+
20
+ By selecting the "I accept" button or other button or mechanism designed to
21
+ acknowledge agreement to the terms of an electronic copy of this Agreement, or
22
+ by installing, downloading, accessing, or otherwise copying or using all or any
23
+ portion of the Adobe Technology:
24
+
25
+ (i) you accept this Agreement on behalf of the entity for which you are
26
+ authorized to act (e.g., an employer) and acknowledge that such entity
27
+ is legally bound by this Agreement (and you agree to act in a manner
28
+ consistent with this Agreement) or, if there is no such entity for
29
+ which you are authorized to act, you accept this Agreement on behalf
30
+ of yourself as an individual and acknowledge that you are legally bound
31
+ by this Agreement,
32
+
33
+ (ii) you represent and warrant that you have the right, power and authority
34
+ to act on behalf of and bind such entity (if any) or yourself, and
35
+
36
+ (iii) you agree, as applicable, to use generative AI Adobe Technology
37
+ features in accordance with the Adobe Experience Cloud Generative AI
38
+ User Guidelines located at
39
+ https://www.adobe.com/legal/licenses-terms/adobe-dx-gen-ai-user-guidelines.html
40
+ ("Guidelines") which govern your use of generative AI features in the
41
+ Adobe Technology and are incorporated by reference into this Agreement.
42
+
43
+ You may not accept this Agreement on behalf of another entity unless you are an
44
+ employee or other agent of such other entity with the right, power and authority
45
+ to act on behalf of such other entity.
46
+
47
+ BY CLICKING TO ACCEPT, OR BY DOWNLOADING, COPYING, INSTALLING OR USING THE
48
+ ADOBE TECHNOLOGY, YOU ACCEPT AND AGREE TO BE BOUND BY ALL THE TERMS AND
49
+ CONDITIONS OF THIS AGREEMENT INCLUDING ALL TERMS INCORPORATED HEREIN BY
50
+ REFERENCE.
51
+
52
+ THIS AGREEMENT IS ENFORCEABLE AGAINST ANY PERSON OR ENTITY THAT INSTALLS AND
53
+ USES THE ADOBE TECHNOLOGY AND ANY PERSON OR ENTITY (E.G., SYSTEM INTEGRATOR,
54
+ CONSULTANT OR CONTRACTOR) THAT INSTALLS OR USES THE ADOBE TECHNOLOGY ON
55
+ ANOTHER PERSON'S OR ENTITY'S BEHALF.
56
+
57
+ ================================================================================
58
+
59
+ EULA-en_US-20250115
60
+ Copyright 2026 Adobe. All rights reserved.
package/README.md ADDED
@@ -0,0 +1,243 @@
1
+ # @adobe/llm-apps-runtime
2
+
3
+ Server runtime for **Adobe LLM Apps** — a platform for building AI tools and interactive widgets that plug into Claude, Cursor, ChatGPT, and other LLM hosts.
4
+
5
+ This package powers every Adobe LLM App deployed on [Adobe I/O Runtime](https://developer.adobe.com/runtime/). It handles the full request lifecycle — action loading, tool registration, widget resources, CORS, and transport — so app developers only need to write their action handlers. Under the hood it speaks the [Model Context Protocol](https://modelcontextprotocol.io/) via the official [MCP TypeScript SDK](https://github.com/modelcontextprotocol/typescript-sdk).
6
+
7
+ ## Install
8
+
9
+ ```bash
10
+ npm install @adobe/llm-apps-runtime
11
+ ```
12
+
13
+ ## Quick start
14
+
15
+ Wire the runtime into your Adobe I/O Runtime action:
16
+
17
+ ```js
18
+ // entry.js — webpack entry point of your Adobe I/O Runtime action
19
+ const { createMain } = require('@adobe/llm-apps-runtime')
20
+
21
+ const moduleContext = require.context('./actions', true, /index\.js$/)
22
+ const htmlContext = require.context('./actions', true, /widget\.html$/)
23
+ let actionsConfig = {}
24
+ try { actionsConfig = require('./actions.json') } catch (e) { /* optional */ }
25
+
26
+ module.exports = createMain({ moduleContext, htmlContext, actionsConfig })
27
+ ```
28
+
29
+ Write your handlers as plain async functions:
30
+
31
+ ```js
32
+ // actions/echo/index.js
33
+ module.exports = async ({ message }) => ({
34
+ content: [{ type: 'text', text: `Echo: ${message}` }]
35
+ })
36
+ ```
37
+
38
+ That's it. Deploy with `aio app deploy` and your handlers are reachable as tools in any LLM host that speaks MCP.
39
+
40
+ ## Features
41
+
42
+ - **Zero-config action discovery** — drop a folder under `actions/`, it becomes a tool
43
+ - **Config-driven metadata** — descriptions, schemas, annotations, and widget config come from `actions.json`
44
+ - **Interactive widgets** — ship `widget.html` alongside your handler for rich UIs in Claude, Cursor, ChatGPT, etc.
45
+ - **EDS widget support** — auto-generate widget markup for [Adobe Edge Delivery Services](https://www.aem.live/) content
46
+ - **Local development** — run your app as a plain HTTP server with no Adobe credentials
47
+ - **Test-friendly** — handlers are plain functions; a filesystem loader lets you write full-stack integration tests without webpack
48
+
49
+ ## API
50
+
51
+ ### `createMain(options)`
52
+
53
+ Creates the Adobe I/O Runtime `main(params)` function that serves your app.
54
+
55
+ ```js
56
+ const { main } = createMain(options)
57
+ ```
58
+
59
+ **Options**
60
+
61
+ | Option | Type | Default | Description |
62
+ |-----------------|-----------------|----------------------|-------------|
63
+ | `moduleContext` | webpack context | — | `require.context` for `actions/**/index.js`. Webpack path. |
64
+ | `htmlContext` | webpack context | — | `require.context` for `actions/**/widget.html`. Webpack path. |
65
+ | `actionsConfig` | object | `{}` | Parsed `actions.json` keyed by action name. Webpack path. |
66
+ | `actionsDir` | string | `<cwd>/actions` | Absolute path to the actions directory. Fs path. |
67
+ | `serverName` | string | `'adobe-llm-apps'` | App name reported to MCP clients. |
68
+ | `serverVersion` | string | `'1.0.0'` | App version reported to MCP clients. |
69
+
70
+ When `moduleContext` is provided, the webpack path is used. Otherwise the filesystem path is used (via `actionsDir`).
71
+
72
+ **Returns** `{ main }` — an async function compatible with Adobe I/O Runtime's action signature.
73
+
74
+ ### `createLocalServer(main, port)`
75
+
76
+ Starts a plain Node.js HTTP server that wraps a `main(params)` function. Useful for `npm run dev:local` in consumer apps — no Adobe credentials required.
77
+
78
+ ```js
79
+ const { createLocalServer } = require('@adobe/llm-apps-runtime')
80
+ const { main } = require('./dist/index.js') // your webpack bundle
81
+ createLocalServer(main, process.env.PORT || 9080)
82
+ ```
83
+
84
+ **Parameters**
85
+
86
+ | Parameter | Type | Default | Description |
87
+ |-----------|----------|---------|-------------|
88
+ | `main` | Function | — | The `main(params)` function returned by `createMain`. |
89
+ | `port` | number | `9080` | Port to listen on. |
90
+
91
+ **Returns** an [`http.Server`](https://nodejs.org/api/http.html#class-httpserver) instance.
92
+
93
+ ### Action loading primitives
94
+
95
+ Exported from the package root for advanced use cases (custom loaders, integration tests):
96
+
97
+ - `loadActionsFromContexts(server, { moduleContext, htmlContext, actionsConfig })` — webpack-based loader
98
+ - `loadActionsFromFs(server, actionsDir, actionsConfig)` — filesystem-based loader
99
+ - `loadActionsConfig(actionsDir)` — read and key `actions.json` by name
100
+
101
+ Most users won't need these — `createMain` calls them internally based on which options you pass.
102
+
103
+ ## Handler contract
104
+
105
+ A handler is an async function exported from `actions/<name>/index.js`:
106
+
107
+ ```js
108
+ module.exports = async function handler(args) {
109
+ return {
110
+ // Shown to the LLM and rendered as text by clients
111
+ content: [{ type: 'text', text: 'Result' }],
112
+
113
+ // Optional. Passed to the widget iframe via window.openai.toolOutput.
114
+ // Never sent to the LLM.
115
+ structuredContent: { key: 'value' }
116
+ }
117
+ }
118
+ ```
119
+
120
+ `args` is the tool's `arguments` object, already validated against the `inputSchema` in `actions.json`.
121
+
122
+ See the [MCP spec](https://modelcontextprotocol.io/docs/concepts/tools) for the full response schema.
123
+
124
+ ## `actions.json`
125
+
126
+ `actions.json` is the source of truth for tool metadata. The runtime reads it from one level above `actionsDir` (i.e. the app root). Each entry drives tool registration:
127
+
128
+ ```json
129
+ {
130
+ "actions": [
131
+ {
132
+ "name": "my-action",
133
+ "title": "My Action",
134
+ "description": "Does something useful",
135
+ "inputSchema": {
136
+ "type": "object",
137
+ "properties": {
138
+ "query": { "type": "string", "description": "The query" }
139
+ },
140
+ "required": ["query"]
141
+ },
142
+ "annotations": { "readOnlyHint": true },
143
+ "widget_type": "EDS",
144
+ "eds_widget": {
145
+ "script_url": "https://.../aem-embed.js",
146
+ "widget_embed_url": "https://.../embed"
147
+ },
148
+ "resource_meta": {
149
+ "ui": {
150
+ "csp": { "connect_domains": ["https://..."] },
151
+ "prefersBorder": true
152
+ }
153
+ },
154
+ "tool_meta": {
155
+ "openai/widgetAccessible": true
156
+ }
157
+ }
158
+ ]
159
+ }
160
+ ```
161
+
162
+ ### Widget resolution priority
163
+
164
+ 1. `widget.html` file in the action directory
165
+ 2. EDS config in `actions.json` (auto-generates `<aem-embed>` markup)
166
+ 3. Tool-only (no widget)
167
+
168
+ ## Testing your handlers
169
+
170
+ Handlers are plain async functions — test them directly:
171
+
172
+ ```js
173
+ const handler = require('./index.js')
174
+
175
+ test('echoes the message', async () => {
176
+ const out = await handler({ message: 'hello' })
177
+ expect(out.content[0].text).toBe('Echo: hello')
178
+ })
179
+ ```
180
+
181
+ For full-stack integration tests, use the filesystem loader path:
182
+
183
+ ```js
184
+ const path = require('path')
185
+ const { createMain } = require('@adobe/llm-apps-runtime')
186
+
187
+ const { main } = createMain({
188
+ actionsDir: path.join(__dirname, '..', 'actions')
189
+ })
190
+
191
+ test('tool call returns expected content', async () => {
192
+ const res = await main({
193
+ __ow_method: 'post',
194
+ __ow_body: JSON.stringify({
195
+ jsonrpc: '2.0',
196
+ id: 1,
197
+ method: 'tools/call',
198
+ params: { name: 'echo', arguments: { message: 'hi' } }
199
+ }),
200
+ __ow_headers: {
201
+ 'content-type': 'application/json',
202
+ accept: 'application/json;q=1.0, text/event-stream;q=0.5'
203
+ }
204
+ })
205
+ const body = JSON.parse(res.body)
206
+ expect(body.result.content[0].text).toBe('Echo: hi')
207
+ })
208
+ ```
209
+
210
+ ## Requirements
211
+
212
+ - Node.js `>=24.0.0` (matches Adobe I/O Runtime `nodejs:24`)
213
+ - Webpack 5 (for bundling when deploying to I/O Runtime)
214
+
215
+ ## Related
216
+
217
+ - [Model Context Protocol](https://modelcontextprotocol.io/) — The open protocol Adobe LLM Apps speak
218
+ - [MCP Apps extension](https://modelcontextprotocol.github.io/ext-apps/) — The widget spec this runtime implements
219
+ - [MCP TypeScript SDK](https://github.com/modelcontextprotocol/typescript-sdk) — Underlying protocol implementation
220
+ - [Adobe I/O Runtime](https://developer.adobe.com/runtime/) — Serverless platform Adobe LLM Apps deploy to
221
+
222
+ ## Releasing
223
+
224
+ Releases are published to npm automatically via GitHub Actions when a GitHub Release is created.
225
+
226
+ **Steps to release a new version:**
227
+
228
+ 1. Bump the version in `package.json` and commit to `main`:
229
+ ```bash
230
+ npm version patch # or minor / major
231
+ git push
232
+ ```
233
+ 2. Create a GitHub Release with a tag that matches the version (e.g. `v0.2.0`):
234
+ - Go to **Releases → Draft a new release** on GitHub
235
+ - Set the tag to `v<version>` (must match `package.json`)
236
+ - Click **Publish release**
237
+ 3. The [`publish` workflow](.github/workflows/publish.yml) runs `npm test` then `npm publish` automatically.
238
+
239
+ **Required secret:** Add `NPM_TOKEN` to the repository's **Settings → Secrets and variables → Actions**. The token must have the `publish` permission scoped to the `@adobe` org (or be a classic Automation token).
240
+
241
+ ## License
242
+
243
+ See [LICENSE](LICENSE).
package/package.json ADDED
@@ -0,0 +1,46 @@
1
+ {
2
+ "name": "@adobe/llm-apps-runtime",
3
+ "version": "0.2.0",
4
+ "description": "Server runtime for Adobe LLM Apps — a platform for building AI tools and interactive widgets on Adobe I/O Runtime",
5
+ "main": "src/index.js",
6
+ "exports": {
7
+ ".": "./src/index.js",
8
+ "./local": "./src/local.js",
9
+ "./package.json": "./package.json"
10
+ },
11
+ "files": [
12
+ "src",
13
+ "LICENSE",
14
+ "README.md"
15
+ ],
16
+ "scripts": {
17
+ "test": "jest",
18
+ "test:watch": "jest --watch",
19
+ "prepublishOnly": "npm test"
20
+ },
21
+ "publishConfig": {
22
+ "access": "public"
23
+ },
24
+ "dependencies": {
25
+ "@adobe/aio-sdk": "^5.0.0",
26
+ "@modelcontextprotocol/sdk": "^1.25.3",
27
+ "zod": "^3.23.8"
28
+ },
29
+ "devDependencies": {
30
+ "@babel/core": "^7.26.10",
31
+ "@babel/preset-env": "^7.26.9",
32
+ "babel-jest": "^29.7.0",
33
+ "jest": "^29.0.0"
34
+ },
35
+ "engines": {
36
+ "node": ">=24.0.0"
37
+ },
38
+ "keywords": [
39
+ "adobe-io",
40
+ "mcp",
41
+ "model-context-protocol",
42
+ "serverless",
43
+ "i/o-runtime"
44
+ ],
45
+ "license": "SEE LICENSE IN LICENSE"
46
+ }
@@ -0,0 +1,264 @@
1
+ /*
2
+ Copyright 2022 Adobe. All rights reserved.
3
+ This file is licensed to you under the Apache License, Version 2.0 (the "License");
4
+ you may not use this file except in compliance with the License. You may obtain a copy
5
+ of the License at http://www.apache.org/licenses/LICENSE-2.0
6
+
7
+ Unless required by applicable law or agreed to in writing, software distributed under
8
+ the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
9
+ OF ANY KIND, either express or implied. See the License for the specific language
10
+ governing permissions and limitations under the License.
11
+ */
12
+
13
+ /**
14
+ * Per-invocation analytics buffering + signed batch flush.
15
+ *
16
+ * OpenWhisk freezes containers after `main()` returns, so we cannot fire-and-forget
17
+ * HTTP from one invocation and have it complete in the next. The wrapper buffers
18
+ * events in a per-invocation array, then performs a single awaited POST at
19
+ * end-of-request. One MCP request that triggers N tool calls = one HTTP round-trip
20
+ * with N events.
21
+ *
22
+ * Hard invariant: tool inputs and outputs are NEVER serialized into events.
23
+ * Only byte sizes are recorded via `safeSize()`.
24
+ *
25
+ * Cost model: this code runs inside the customer's Adobe I/O Runtime namespace
26
+ * and consumes the customer's App Builder Pack entitlement (6M GB-s/year). The
27
+ * 500 ms hard timeout caps the customer's billable activation extension at
28
+ * ~600 ms (rounded up to the nearest 100 ms by OpenWhisk billing). The circuit
29
+ * breaker exists primarily to protect the customer's GB-s budget when ingest
30
+ * is slow or unhealthy — load-shedding for the customer's wallet, not just for
31
+ * our service.
32
+ */
33
+
34
+ const crypto = require('crypto')
35
+
36
+ const DEFAULT_FLUSH_TIMEOUT_MS = 500
37
+ const MAX_EVENTS_PER_BATCH = 100
38
+ const MAX_BODY_BYTES = 256 * 1024
39
+
40
+ const CB_FAILURE_THRESHOLD = 3
41
+ const CB_COOLDOWN_MS = 30_000
42
+
43
// Module-level circuit-breaker state. It deliberately outlives a single
// request: warm OpenWhisk containers reuse this module, so a breaker tripped
// in one invocation sheds load in the next ones — saving the customer's GB-s
// budget from POSTs that are likely doomed while ingest is unhealthy.
const breaker = {
  consecutiveFailures: 0,
  openUntil: 0
}

// True while the breaker's cooldown window extends into the future.
function isCircuitOpen () {
  return breaker.openUntil > Date.now()
}

// A successful flush closes the breaker and clears the failure streak.
function recordSuccess () {
  breaker.openUntil = 0
  breaker.consecutiveFailures = 0
}

// Count one failed flush; once the streak reaches the threshold, open the
// breaker for the cooldown window.
function recordFailure () {
  breaker.consecutiveFailures += 1
  const tripped = breaker.consecutiveFailures >= CB_FAILURE_THRESHOLD
  if (tripped) {
    breaker.openUntil = Date.now() + CB_COOLDOWN_MS
  }
}

// Test hook: restore the breaker to its pristine closed state.
function _resetBreakerForTest () {
  breaker.openUntil = 0
  breaker.consecutiveFailures = 0
}
72
+
73
/**
 * Estimate the JSON-serialized byte length of a value.
 *
 * Only the size is ever computed — the serialized text is never retained or
 * attached to an analytics event. Returns 0 for null/undefined and for any
 * value that cannot be serialized (circular references, BigInt, or values
 * where JSON.stringify yields undefined), so a misbehaving payload can never
 * crash the wrapper.
 *
 * @param {*} value - Arbitrary value to measure.
 * @returns {number} UTF-8 byte length of the JSON form, or 0.
 */
function safeSize (value) {
  if (value == null) return 0
  try {
    // Buffer.byteLength stays inside the try: stringify may return
    // undefined (e.g. for functions), which byteLength rejects.
    const json = JSON.stringify(value)
    return Buffer.byteLength(json, 'utf8')
  } catch (e) {
    return 0
  }
}
86
+
87
/**
 * Extract the analytics configuration from OpenWhisk action params.
 *
 * Every required field must be present; otherwise null is returned and the
 * wrapper degrades to a no-op, so unconfigured deploys make zero network
 * calls. Note `epoch` is nullish-checked (not truthiness-checked) so a
 * legitimate epoch of 0 is accepted.
 *
 * @param {object|null|undefined} params - Raw OpenWhisk action params.
 * @returns {object|null} Normalized config (all strings), or null.
 */
function readAnalyticsConfig (params) {
  if (!params || typeof params !== 'object') return null

  const {
    LLMA_ANALYTICS_URL: url,
    LLMA_ANALYTICS_APP_ID: appId,
    LLMA_ANALYTICS_ORG_ID: orgId,
    LLMA_ANALYTICS_VERSION: version,
    LLMA_ANALYTICS_EPOCH: epoch,
    LLMA_ANALYTICS_KEY: keyHex
  } = params

  const incomplete =
    !url || !appId || !orgId || !version || epoch == null || !keyHex
  if (incomplete) return null

  return {
    url: String(url),
    appId: String(appId),
    orgId: String(orgId),
    version: String(version),
    epoch: String(epoch),
    keyHex: String(keyHex),
    runtimeNamespace: String(params.__OW_NAMESPACE || params.__ow_namespace || '')
  }
}
116
+
117
/**
 * Build the `Authorization` header value for a signed analytics envelope.
 *
 * The HMAC-SHA256 input is `version + '\n' + ts + '\n' + rawBody`; the server
 * recomputes that exact string over the on-the-wire body bytes, so `rawBody`
 * must be the byte-identical string that is POSTed.
 *
 * @param {object} p
 * @param {string} p.rawBody - Serialized envelope, exactly as sent.
 * @param {number} p.ts - Unix timestamp (seconds).
 * @param {string} p.appId - Application identifier.
 * @param {string} p.version - Signing-scheme version.
 * @param {string} p.epoch - Key epoch identifier.
 * @param {string} p.kAppHex - Hex-encoded per-app signing key.
 * @returns {string} Full `LLMA-HMAC-SHA256 ...` header value.
 */
function signEnvelope ({ rawBody, ts, appId, version, epoch, kAppHex }) {
  const signingKey = Buffer.from(kAppHex, 'hex')
  const hmac = crypto.createHmac('sha256', signingKey)
  hmac.update(`${version}\n${ts}\n${rawBody}`, 'utf8')
  const mac = hmac.digest('base64')
  return `LLMA-HMAC-SHA256 v=${version}, app=${appId}, epoch=${epoch}, ts=${ts}, mac=${mac}`
}
129
+
130
/**
 * Create a per-invocation analytics buffer.
 *
 * Returns `{ push, flush, size, droppedCount, totalBytes }`. Events are
 * queued with `push` and sent in a single signed POST by `flush` at
 * end-of-request. Batch limits are enforced at push time: anything past the
 * `MAX_EVENTS_PER_BATCH` count or the `MAX_BODY_BYTES` budget is dropped and
 * counted, because the ingest server enforces the same caps and would reject
 * the whole batch otherwise.
 *
 * @param {object} [options]
 * @param {object} [options.logger] - Optional logger; only `warn` is used.
 */
function createBuffer ({ logger } = {}) {
  const queue = []
  let bytesQueued = 0
  let droppedEvents = 0

  /**
   * Queue one event, enforcing count and byte caps up front.
   * @param {object} event - Event payload (callers must not include tool
   *   inputs/outputs; only sizes belong in events — see file header).
   * @returns {boolean} true if queued, false if dropped.
   */
  function push (event) {
    if (queue.length >= MAX_EVENTS_PER_BATCH) {
      droppedEvents += 1
      return false
    }
    let eventBytes
    try {
      eventBytes = Buffer.byteLength(JSON.stringify(event), 'utf8')
    } catch (e) {
      // Unserializable (circular reference, BigInt, ...): drop it here so
      // the envelope serialization in flush() can never throw because of it.
      droppedEvents += 1
      return false
    }
    if (bytesQueued + eventBytes > MAX_BODY_BYTES) {
      droppedEvents += 1
      return false
    }
    queue.push(event)
    bytesQueued += eventBytes
    return true
  }

  /**
   * Send every queued event in one signed, time-boxed POST.
   *
   * Short-circuits (no network) when the queue is empty, when no config is
   * present, or when the circuit breaker is open. Feeds the breaker on every
   * outcome. Never throws.
   *
   * @param {object|null} cfg - Config from readAnalyticsConfig, or null.
   * @param {object} [opts]
   * @param {Function} [opts.fetchImpl] - Injectable fetch (tests); defaults
   *   to globalThis.fetch when available.
   * @param {number} [opts.timeoutMs] - Abort timeout; defaults to 500 ms.
   * @returns {Promise<object>} Outcome record ({ ok, sent, ... }).
   */
  async function flush (cfg, opts = {}) {
    if (queue.length === 0) {
      return { ok: true, sent: 0, skipped: 'empty', dropped: droppedEvents }
    }
    if (!cfg) {
      return { ok: true, sent: 0, skipped: 'no-config', dropped: droppedEvents }
    }
    if (isCircuitOpen()) {
      logger?.warn?.('analytics circuit open; skipping flush')
      return { ok: false, sent: 0, skipped: 'circuit-open', dropped: droppedEvents }
    }

    const envelope = {
      schema_version: 1,
      app_id: cfg.appId,
      org_id: cfg.orgId,
      runtime_namespace: cfg.runtimeNamespace || '',
      events: queue.slice()
    }

    const ts = Math.floor(Date.now() / 1000)
    let rawBody
    try {
      rawBody = JSON.stringify(envelope)
    } catch (err) {
      recordFailure()
      logger?.warn?.(`analytics flush serialization failed: ${err.message}`)
      return { ok: false, sent: 0, error: 'serialization-error', dropped: droppedEvents }
    }

    const authorization = signEnvelope({
      rawBody,
      ts,
      appId: cfg.appId,
      version: cfg.version,
      epoch: cfg.epoch,
      kAppHex: cfg.keyHex
    })

    const doFetch = opts.fetchImpl ||
      (typeof globalThis.fetch === 'function' ? globalThis.fetch : null)
    if (!doFetch) {
      recordFailure()
      return { ok: false, sent: 0, error: 'no-fetch-impl', dropped: droppedEvents }
    }

    try {
      const res = await doFetch(cfg.url, {
        method: 'POST',
        headers: {
          'content-type': 'application/json',
          authorization
        },
        body: rawBody,
        // Time-box the request so a slow ingest endpoint cannot extend the
        // customer's billable activation past the configured cap.
        signal: AbortSignal.timeout(opts.timeoutMs || DEFAULT_FLUSH_TIMEOUT_MS)
      })

      if (res && res.ok) {
        recordSuccess()
        return { ok: true, sent: envelope.events.length, dropped: droppedEvents }
      }

      recordFailure()
      return {
        ok: false,
        sent: 0,
        status: res ? res.status : 0,
        dropped: droppedEvents
      }
    } catch (err) {
      recordFailure()
      logger?.warn?.(`analytics flush failed: ${err.message}`)
      return { ok: false, sent: 0, error: err.message, dropped: droppedEvents }
    }
  }

  return {
    push,
    flush,
    get size () { return queue.length },
    get droppedCount () { return droppedEvents },
    get totalBytes () { return bytesQueued }
  }
}
252
+
253
+ module.exports = {
254
+ createBuffer,
255
+ readAnalyticsConfig,
256
+ safeSize,
257
+ signEnvelope,
258
+ MAX_EVENTS_PER_BATCH,
259
+ MAX_BODY_BYTES,
260
+ DEFAULT_FLUSH_TIMEOUT_MS,
261
+ CB_FAILURE_THRESHOLD,
262
+ CB_COOLDOWN_MS,
263
+ _resetBreakerForTest
264
+ }