agentid-sdk 0.1.26 → 0.1.29

This diff shows the changes between package versions that have been publicly released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
package/README.md CHANGED
@@ -110,6 +110,8 @@ const response = await secured.chat.completions.create({
110
110
  console.log(response.choices[0]?.message?.content ?? "");
111
111
  ```
112
112
 
113
+ Wrapped OpenAI calls persist telemetry for both regular and streamed completions. For `stream: true`, logging happens when the stream finishes.
114
+
113
115
  > Scope note: AgentID compliance/risk controls apply to the specific SDK-wrapped LLM calls (`guard()`, `wrapOpenAI()`, LangChain callback-wrapped flows). They do not automatically classify unrelated code paths in your whole monolithic application.
114
116
 
115
117
  ### LangChain Integration
@@ -145,6 +147,8 @@ const result = await chain.invoke(
145
147
  console.log(result);
146
148
  ```
147
149
 
150
+ LangChain callbacks log on run completion. Token/cost telemetry for streamed chains depends on the provider exposing usage in the final LangChain result.
151
+
148
152
  ### Raw Ingest API (Telemetry Only)
149
153
 
150
154
  ```ts
@@ -18,6 +18,7 @@ interface GuardParams {
18
18
  user_id?: string;
19
19
  client_event_id?: string;
20
20
  expected_languages?: string[];
21
+ request_identity?: Record<string, unknown>;
21
22
  client_capabilities?: {
22
23
  capabilities: {
23
24
  has_feedback_handler: boolean;
@@ -67,10 +68,11 @@ interface LogParams {
67
68
  input: string;
68
69
  output: string;
69
70
  model: string;
70
- usage?: Record<string, number>;
71
- tokens?: Record<string, number>;
71
+ usage?: Record<string, unknown>;
72
+ tokens?: Record<string, unknown>;
72
73
  latency?: number;
73
74
  user_id?: string;
75
+ request_identity?: Record<string, unknown>;
74
76
  metadata?: Record<string, unknown>;
75
77
  event_type?: "start" | "complete" | "error" | "human_override" | "security_alert" | "security_block" | "security_policy_violation" | "transparency_badge_rendered";
76
78
  severity?: "info" | "warning" | "error" | "high";
@@ -179,6 +181,7 @@ declare class AgentID {
179
181
  guard(params: GuardParams, options?: RequestOptions): Promise<GuardResponse>;
180
182
  private sendIngest;
181
183
  private extractStreamChunkText;
184
+ private extractStreamChunkUsage;
182
185
  private wrapCompletion;
183
186
  /**
184
187
  * LOG: Sends telemetry after execution.
@@ -197,13 +200,14 @@ declare class AgentID {
197
200
  * Wrap an OpenAI client once; AgentID will automatically:
198
201
  * - run guard() before chat.completions.create
199
202
  * - measure latency
200
- * - fire-and-forget ingest logging
203
+ * - persist ingest telemetry for the wrapped call
201
204
  */
202
205
  wrapOpenAI<T>(openai: T, options: {
203
206
  system_id: string;
204
207
  user_id?: string;
205
208
  expected_languages?: string[];
206
209
  expectedLanguages?: string[];
210
+ request_identity?: Record<string, unknown>;
207
211
  apiKey?: string;
208
212
  api_key?: string;
209
213
  resolveApiKey?: (request: Record<string, unknown>) => string | undefined;
@@ -18,6 +18,7 @@ interface GuardParams {
18
18
  user_id?: string;
19
19
  client_event_id?: string;
20
20
  expected_languages?: string[];
21
+ request_identity?: Record<string, unknown>;
21
22
  client_capabilities?: {
22
23
  capabilities: {
23
24
  has_feedback_handler: boolean;
@@ -67,10 +68,11 @@ interface LogParams {
67
68
  input: string;
68
69
  output: string;
69
70
  model: string;
70
- usage?: Record<string, number>;
71
- tokens?: Record<string, number>;
71
+ usage?: Record<string, unknown>;
72
+ tokens?: Record<string, unknown>;
72
73
  latency?: number;
73
74
  user_id?: string;
75
+ request_identity?: Record<string, unknown>;
74
76
  metadata?: Record<string, unknown>;
75
77
  event_type?: "start" | "complete" | "error" | "human_override" | "security_alert" | "security_block" | "security_policy_violation" | "transparency_badge_rendered";
76
78
  severity?: "info" | "warning" | "error" | "high";
@@ -179,6 +181,7 @@ declare class AgentID {
179
181
  guard(params: GuardParams, options?: RequestOptions): Promise<GuardResponse>;
180
182
  private sendIngest;
181
183
  private extractStreamChunkText;
184
+ private extractStreamChunkUsage;
182
185
  private wrapCompletion;
183
186
  /**
184
187
  * LOG: Sends telemetry after execution.
@@ -197,13 +200,14 @@ declare class AgentID {
197
200
  * Wrap an OpenAI client once; AgentID will automatically:
198
201
  * - run guard() before chat.completions.create
199
202
  * - measure latency
200
- * - fire-and-forget ingest logging
203
+ * - persist ingest telemetry for the wrapped call
201
204
  */
202
205
  wrapOpenAI<T>(openai: T, options: {
203
206
  system_id: string;
204
207
  user_id?: string;
205
208
  expected_languages?: string[];
206
209
  expectedLanguages?: string[];
210
+ request_identity?: Record<string, unknown>;
207
211
  apiKey?: string;
208
212
  api_key?: string;
209
213
  resolveApiKey?: (request: Record<string, unknown>) => string | undefined;
@@ -1683,7 +1683,7 @@ function getInjectionScanner() {
1683
1683
 
1684
1684
  // src/sdk-version.ts
1685
1685
  var FALLBACK_SDK_VERSION = "js-0.0.0-dev";
1686
- var AGENTID_SDK_VERSION_HEADER = "js-0.1.26".trim().length > 0 ? "js-0.1.26" : FALLBACK_SDK_VERSION;
1686
+ var AGENTID_SDK_VERSION_HEADER = "js-0.1.29".trim().length > 0 ? "js-0.1.29" : FALLBACK_SDK_VERSION;
1687
1687
 
1688
1688
  // src/local-security-enforcer.ts
1689
1689
  var DEFAULT_FAIL_OPEN_CONFIG = {
@@ -3047,6 +3047,13 @@ var AgentID = class {
3047
3047
  }
3048
3048
  return text;
3049
3049
  }
3050
+ extractStreamChunkUsage(chunk) {
3051
+ if (!chunk || typeof chunk !== "object") {
3052
+ return void 0;
3053
+ }
3054
+ const usage = chunk.usage;
3055
+ return usage && typeof usage === "object" && !Array.isArray(usage) ? usage : void 0;
3056
+ }
3050
3057
  wrapCompletion(completion) {
3051
3058
  if (typeof completion === "string") {
3052
3059
  const masked = this.pii.anonymize(completion);
@@ -3070,7 +3077,9 @@ var AgentID = class {
3070
3077
  const source = completion;
3071
3078
  const collector = createCompletionChunkCollector();
3072
3079
  const extractStreamChunkText = this.extractStreamChunkText.bind(this);
3080
+ const extractStreamChunkUsage = this.extractStreamChunkUsage.bind(this);
3073
3081
  const piiManager = this.pii;
3082
+ let lastUsage;
3074
3083
  let resolveDone = null;
3075
3084
  let rejectDone = null;
3076
3085
  const done = new Promise((resolve, reject) => {
@@ -3085,6 +3094,10 @@ var AgentID = class {
3085
3094
  if (chunkText) {
3086
3095
  await collector.push(chunkText);
3087
3096
  }
3097
+ const chunkUsage = extractStreamChunkUsage(chunk);
3098
+ if (chunkUsage) {
3099
+ lastUsage = chunkUsage;
3100
+ }
3088
3101
  yield chunk;
3089
3102
  }
3090
3103
  await collector.close();
@@ -3094,7 +3107,8 @@ var AgentID = class {
3094
3107
  mode: "static",
3095
3108
  rawOutput,
3096
3109
  transformedOutput: masked.maskedText,
3097
- outputMasked: masked.maskedText !== rawOutput
3110
+ outputMasked: masked.maskedText !== rawOutput,
3111
+ usage: lastUsage
3098
3112
  });
3099
3113
  } catch (error) {
3100
3114
  await collector.abort(error);
@@ -3144,7 +3158,7 @@ var AgentID = class {
3144
3158
  * Wrap an OpenAI client once; AgentID will automatically:
3145
3159
  * - run guard() before chat.completions.create
3146
3160
  * - measure latency
3147
- * - fire-and-forget ingest logging
3161
+ * - persist ingest telemetry for the wrapped call
3148
3162
  */
3149
3163
  wrapOpenAI(openai, options) {
3150
3164
  const systemId = options.system_id;
@@ -3240,6 +3254,7 @@ var AgentID = class {
3240
3254
  user_id: options.user_id,
3241
3255
  client_event_id: clientEventId,
3242
3256
  expected_languages: expectedLanguages,
3257
+ request_identity: options.request_identity,
3243
3258
  client_capabilities: this.buildClientCapabilities(
3244
3259
  "openai",
3245
3260
  false,
@@ -3315,10 +3330,11 @@ var AgentID = class {
3315
3330
  event_id: canonicalClientEventId,
3316
3331
  system_id: systemId,
3317
3332
  user_id: options.user_id,
3333
+ request_identity: options.request_identity,
3318
3334
  input: maskedText,
3319
3335
  output: outputForLog,
3320
3336
  model: adapter.getModelName(maskedReq),
3321
- usage: void 0,
3337
+ usage: result.usage,
3322
3338
  latency: modelLatencyMs2,
3323
3339
  event_type: "complete",
3324
3340
  metadata: {
@@ -3381,6 +3397,7 @@ var AgentID = class {
3381
3397
  event_id: canonicalClientEventId,
3382
3398
  system_id: systemId,
3383
3399
  user_id: options.user_id,
3400
+ request_identity: options.request_identity,
3384
3401
  input: maskedText,
3385
3402
  output: outputForLog,
3386
3403
  model,
package/dist/index.d.mts CHANGED
@@ -1,5 +1,5 @@
1
- import { T as TransparencyMetadata } from './agentid-DviYzyAM.mjs';
2
- export { A as AgentID, D as DependencyError, G as GuardParams, a as GuardResponse, L as LogParams, P as PreparedInput, R as RequestOptions, S as SecurityBlockError } from './agentid-DviYzyAM.mjs';
1
+ import { T as TransparencyMetadata } from './agentid-agvYW2vW.mjs';
2
+ export { A as AgentID, D as DependencyError, G as GuardParams, a as GuardResponse, L as LogParams, P as PreparedInput, R as RequestOptions, S as SecurityBlockError } from './agentid-agvYW2vW.mjs';
3
3
  import * as react_jsx_runtime from 'react/jsx-runtime';
4
4
 
5
5
  type PIIMapping = Record<string, string>;
@@ -16,7 +16,7 @@ declare class PIIManager {
16
16
  deanonymize(text: string, mapping: PIIMapping): string;
17
17
  }
18
18
 
19
- type TokenUsage = Record<string, number>;
19
+ type TokenUsage = Record<string, unknown>;
20
20
  interface LLMAdapter {
21
21
  extractInput(req: unknown): string | null;
22
22
  getModelName(req: unknown, res?: unknown): string;
package/dist/index.d.ts CHANGED
@@ -1,5 +1,5 @@
1
- import { T as TransparencyMetadata } from './agentid-DviYzyAM.js';
2
- export { A as AgentID, D as DependencyError, G as GuardParams, a as GuardResponse, L as LogParams, P as PreparedInput, R as RequestOptions, S as SecurityBlockError } from './agentid-DviYzyAM.js';
1
+ import { T as TransparencyMetadata } from './agentid-agvYW2vW.js';
2
+ export { A as AgentID, D as DependencyError, G as GuardParams, a as GuardResponse, L as LogParams, P as PreparedInput, R as RequestOptions, S as SecurityBlockError } from './agentid-agvYW2vW.js';
3
3
  import * as react_jsx_runtime from 'react/jsx-runtime';
4
4
 
5
5
  type PIIMapping = Record<string, string>;
@@ -16,7 +16,7 @@ declare class PIIManager {
16
16
  deanonymize(text: string, mapping: PIIMapping): string;
17
17
  }
18
18
 
19
- type TokenUsage = Record<string, number>;
19
+ type TokenUsage = Record<string, unknown>;
20
20
  interface LLMAdapter {
21
21
  extractInput(req: unknown): string | null;
22
22
  getModelName(req: unknown, res?: unknown): string;
package/dist/index.js CHANGED
@@ -86,7 +86,7 @@ var OpenAIAdapter = class {
86
86
 
87
87
  // src/sdk-version.ts
88
88
  var FALLBACK_SDK_VERSION = "js-0.0.0-dev";
89
- var AGENTID_SDK_VERSION_HEADER = "js-0.1.26".trim().length > 0 ? "js-0.1.26" : FALLBACK_SDK_VERSION;
89
+ var AGENTID_SDK_VERSION_HEADER = "js-0.1.29".trim().length > 0 ? "js-0.1.29" : FALLBACK_SDK_VERSION;
90
90
 
91
91
  // src/pii-national-identifiers.ts
92
92
  var MAX_CANDIDATES_PER_RULE = 256;
@@ -3091,6 +3091,13 @@ var AgentID = class {
3091
3091
  }
3092
3092
  return text;
3093
3093
  }
3094
+ extractStreamChunkUsage(chunk) {
3095
+ if (!chunk || typeof chunk !== "object") {
3096
+ return void 0;
3097
+ }
3098
+ const usage = chunk.usage;
3099
+ return usage && typeof usage === "object" && !Array.isArray(usage) ? usage : void 0;
3100
+ }
3094
3101
  wrapCompletion(completion) {
3095
3102
  if (typeof completion === "string") {
3096
3103
  const masked = this.pii.anonymize(completion);
@@ -3114,7 +3121,9 @@ var AgentID = class {
3114
3121
  const source = completion;
3115
3122
  const collector = createCompletionChunkCollector();
3116
3123
  const extractStreamChunkText = this.extractStreamChunkText.bind(this);
3124
+ const extractStreamChunkUsage = this.extractStreamChunkUsage.bind(this);
3117
3125
  const piiManager = this.pii;
3126
+ let lastUsage;
3118
3127
  let resolveDone = null;
3119
3128
  let rejectDone = null;
3120
3129
  const done = new Promise((resolve, reject) => {
@@ -3129,6 +3138,10 @@ var AgentID = class {
3129
3138
  if (chunkText) {
3130
3139
  await collector.push(chunkText);
3131
3140
  }
3141
+ const chunkUsage = extractStreamChunkUsage(chunk);
3142
+ if (chunkUsage) {
3143
+ lastUsage = chunkUsage;
3144
+ }
3132
3145
  yield chunk;
3133
3146
  }
3134
3147
  await collector.close();
@@ -3138,7 +3151,8 @@ var AgentID = class {
3138
3151
  mode: "static",
3139
3152
  rawOutput,
3140
3153
  transformedOutput: masked.maskedText,
3141
- outputMasked: masked.maskedText !== rawOutput
3154
+ outputMasked: masked.maskedText !== rawOutput,
3155
+ usage: lastUsage
3142
3156
  });
3143
3157
  } catch (error) {
3144
3158
  await collector.abort(error);
@@ -3188,7 +3202,7 @@ var AgentID = class {
3188
3202
  * Wrap an OpenAI client once; AgentID will automatically:
3189
3203
  * - run guard() before chat.completions.create
3190
3204
  * - measure latency
3191
- * - fire-and-forget ingest logging
3205
+ * - persist ingest telemetry for the wrapped call
3192
3206
  */
3193
3207
  wrapOpenAI(openai, options) {
3194
3208
  const systemId = options.system_id;
@@ -3284,6 +3298,7 @@ var AgentID = class {
3284
3298
  user_id: options.user_id,
3285
3299
  client_event_id: clientEventId,
3286
3300
  expected_languages: expectedLanguages,
3301
+ request_identity: options.request_identity,
3287
3302
  client_capabilities: this.buildClientCapabilities(
3288
3303
  "openai",
3289
3304
  false,
@@ -3359,10 +3374,11 @@ var AgentID = class {
3359
3374
  event_id: canonicalClientEventId,
3360
3375
  system_id: systemId,
3361
3376
  user_id: options.user_id,
3377
+ request_identity: options.request_identity,
3362
3378
  input: maskedText,
3363
3379
  output: outputForLog,
3364
3380
  model: adapter.getModelName(maskedReq),
3365
- usage: void 0,
3381
+ usage: result.usage,
3366
3382
  latency: modelLatencyMs2,
3367
3383
  event_type: "complete",
3368
3384
  metadata: {
@@ -3425,6 +3441,7 @@ var AgentID = class {
3425
3441
  event_id: canonicalClientEventId,
3426
3442
  system_id: systemId,
3427
3443
  user_id: options.user_id,
3444
+ request_identity: options.request_identity,
3428
3445
  input: maskedText,
3429
3446
  output: outputForLog,
3430
3447
  model,
package/dist/index.mjs CHANGED
@@ -7,7 +7,7 @@ import {
7
7
  SecurityBlockError,
8
8
  getInjectionScanner,
9
9
  scanWithRegex
10
- } from "./chunk-JIQGHFHI.mjs";
10
+ } from "./chunk-JMNBPJDF.mjs";
11
11
 
12
12
  // src/transparency-badge.tsx
13
13
  import * as React from "react";
@@ -1,5 +1,5 @@
1
1
  import { BaseCallbackHandler } from '@langchain/core/callbacks/base';
2
- import { A as AgentID } from './agentid-DviYzyAM.mjs';
2
+ import { A as AgentID } from './agentid-agvYW2vW.mjs';
3
3
 
4
4
  /**
5
5
  * LangChainJS callback handler (dependency-free shape).
@@ -1,5 +1,5 @@
1
1
  import { BaseCallbackHandler } from '@langchain/core/callbacks/base';
2
- import { A as AgentID } from './agentid-DviYzyAM.js';
2
+ import { A as AgentID } from './agentid-agvYW2vW.js';
3
3
 
4
4
  /**
5
5
  * LangChainJS callback handler (dependency-free shape).
package/dist/langchain.js CHANGED
@@ -27,7 +27,7 @@ var import_base = require("@langchain/core/callbacks/base");
27
27
 
28
28
  // src/sdk-version.ts
29
29
  var FALLBACK_SDK_VERSION = "js-0.0.0-dev";
30
- var AGENTID_SDK_VERSION_HEADER = "js-0.1.26".trim().length > 0 ? "js-0.1.26" : FALLBACK_SDK_VERSION;
30
+ var AGENTID_SDK_VERSION_HEADER = "js-0.1.29".trim().length > 0 ? "js-0.1.29" : FALLBACK_SDK_VERSION;
31
31
 
32
32
  // src/pii-national-identifiers.ts
33
33
  var REGION_ANCHORS = {
@@ -1,6 +1,6 @@
1
1
  import {
2
2
  SecurityBlockError
3
- } from "./chunk-JIQGHFHI.mjs";
3
+ } from "./chunk-JMNBPJDF.mjs";
4
4
 
5
5
  // src/langchain.ts
6
6
  import { BaseCallbackHandler } from "@langchain/core/callbacks/base";
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "agentid-sdk",
3
- "version": "0.1.26",
3
+ "version": "0.1.29",
4
4
  "description": "AgentID JavaScript/TypeScript SDK for guard, ingest, tracing, and analytics.",
5
5
  "license": "MIT",
6
6
  "homepage": "https://agentid.ai",
@@ -59,8 +59,5 @@
59
59
  "@types/react": "^19.2.2",
60
60
  "tsup": "^8.3.5",
61
61
  "typescript": "^5.0.0"
62
- },
63
- "dependencies": {
64
- "agentid-sdk": "^0.1.21"
65
62
  }
66
63
  }