@rheonic/sdk 0.1.0-beta.3 → 0.1.0-beta.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -120,12 +120,14 @@ await client.captureEvent(
120
120
  buildEvent({
121
121
  provider: "openai",
122
122
  model: "gpt-4o-mini",
123
- request: { endpoint: "/chat/completions", feature: "assistant" },
123
+ request: { endpoint: "/chat/completions", feature: "assistant", token_explosion_tokens: 64 },
124
124
  response: { total_tokens: 64, latency_ms: 120, http_status: 200 },
125
125
  }),
126
126
  );
127
127
  ```
128
128
 
129
+ `token_explosion_tokens` is optional. Set it only on custom or manually built events, when you want token-explosion detection to use the same request-context signal that the SDK's automatic instrumentation already sends to both the protect and ingest endpoints.
130
+
129
131
  ## Reference
130
132
 
131
133
  Full quickstart:
package/dist/client.js CHANGED
@@ -30,7 +30,13 @@ export class Client {
30
30
  constructor(config) {
31
31
  this.baseUrl = config.baseUrl ?? process.env.RHEONIC_BASE_URL ?? sdkNodeConfig.defaultBaseUrl;
32
32
  this.ingestKey = config.ingestKey;
33
- this.environment = config.environment ?? sdkNodeConfig.defaultEnvironment;
33
+ this.environment =
34
+ config.environment ??
35
+ process.env.NODE_ENV ??
36
+ process.env.APP_ENV ??
37
+ process.env.ENVIRONMENT ??
38
+ process.env.ENV ??
39
+ sdkNodeConfig.defaultEnvironment;
34
40
  this.flushIntervalMs = config.flushIntervalMs ?? sdkNodeConfig.defaultFlushIntervalMs;
35
41
  this.maxQueueSize = config.maxQueueSize ?? sdkNodeConfig.defaultMaxQueueSize;
36
42
  this.overflowPolicy = config.overflowPolicy ?? "drop_oldest";
package/dist/config.d.ts CHANGED
@@ -1,12 +1,12 @@
1
1
  export declare const sdkNodeConfig: {
2
2
  readonly defaultBaseUrl: "http://localhost:8000";
3
- readonly defaultEnvironment: "dev";
3
+ readonly defaultEnvironment: "unknown";
4
4
  readonly defaultFlushIntervalMs: 1000;
5
5
  readonly defaultMaxQueueSize: 1000;
6
6
  readonly defaultFlushTimeoutMs: 500;
7
7
  readonly defaultRequestTimeoutMs: 1000;
8
8
  readonly defaultProtectFailMode: "open";
9
- readonly internalProtectDecisionTimeoutMs: 150;
9
+ readonly internalProtectDecisionTimeoutMs: 160;
10
10
  readonly retryDelayMinMs: 200;
11
11
  readonly retryDelayMaxMs: 400;
12
12
  readonly defaultTokenizerEncoding: "cl100k_base";
package/dist/config.js CHANGED
@@ -1,12 +1,12 @@
1
1
  export const sdkNodeConfig = {
2
2
  defaultBaseUrl: "http://localhost:8000",
3
- defaultEnvironment: "dev",
3
+ defaultEnvironment: "unknown",
4
4
  defaultFlushIntervalMs: 1000,
5
5
  defaultMaxQueueSize: 1000,
6
6
  defaultFlushTimeoutMs: 500,
7
7
  defaultRequestTimeoutMs: 1000,
8
8
  defaultProtectFailMode: "open",
9
- internalProtectDecisionTimeoutMs: 150,
9
+ internalProtectDecisionTimeoutMs: 160,
10
10
  retryDelayMinMs: 200,
11
11
  retryDelayMaxMs: 400,
12
12
  defaultTokenizerEncoding: "cl100k_base",
@@ -3,6 +3,7 @@ export interface EventRequest {
3
3
  feature?: string;
4
4
  input_tokens?: number;
5
5
  input_tokens_estimate?: number;
6
+ token_explosion_tokens?: number;
6
7
  max_output_tokens?: number;
7
8
  protect_decision?: string;
8
9
  protect_reason?: string;
package/dist/logger.js CHANGED
@@ -23,7 +23,12 @@ export function emitLog(params) {
23
23
  timestamp: new Date().toISOString(),
24
24
  level: params.level,
25
25
  service: SERVICE_NAME,
26
- env: (params.environment ?? process.env.RHEONIC_ENV ?? process.env.NODE_ENV ?? "dev").toLowerCase(),
26
+ env: (params.environment ??
27
+ process.env.NODE_ENV ??
28
+ process.env.APP_ENV ??
29
+ process.env.ENVIRONMENT ??
30
+ process.env.ENV ??
31
+ "unknown").toLowerCase(),
27
32
  trace_id: params.traceId ?? getTraceId(),
28
33
  span_id: params.spanId ?? getSpanId(),
29
34
  event: sanitizeEvent(params.event),
@@ -45,4 +45,4 @@ export declare class ProtectEngine {
45
45
  private reportDecisionTimeout;
46
46
  private reportDecisionUnavailable;
47
47
  }
48
- export declare const defaultProtectTimeoutMs: 150;
48
+ export declare const defaultProtectTimeoutMs: 160;
@@ -55,6 +55,7 @@ export function instrumentAnthropic(anthropicClient, options) {
55
55
  request: {
56
56
  endpoint: options.endpoint,
57
57
  feature: options.feature,
58
+ token_explosion_tokens: typeof estimatedInputTokens === "number" ? estimatedInputTokens : undefined,
58
59
  input_tokens_estimate: typeof estimatedInputTokens === "number" ? estimatedInputTokens : undefined,
59
60
  protect_decision: protectDecision.decision === "warn" ? "warn" : undefined,
60
61
  protect_reason: protectDecision.decision === "warn" ? protectDecision.reason : undefined,
@@ -75,6 +76,7 @@ export function instrumentAnthropic(anthropicClient, options) {
75
76
  request: {
76
77
  endpoint: options.endpoint,
77
78
  feature: options.feature,
79
+ token_explosion_tokens: typeof estimatedInputTokens === "number" ? estimatedInputTokens : undefined,
78
80
  input_tokens_estimate: typeof estimatedInputTokens === "number" ? estimatedInputTokens : undefined,
79
81
  protect_decision: protectDecision.decision === "warn" ? "warn" : undefined,
80
82
  protect_reason: protectDecision.decision === "warn" ? protectDecision.reason : undefined,
@@ -55,6 +55,7 @@ export function instrumentGoogle(googleModel, options) {
55
55
  request: {
56
56
  endpoint: options.endpoint,
57
57
  feature: options.feature,
58
+ token_explosion_tokens: typeof estimatedInputTokens === "number" ? estimatedInputTokens : undefined,
58
59
  input_tokens_estimate: typeof estimatedInputTokens === "number" ? estimatedInputTokens : undefined,
59
60
  protect_decision: protectDecision.decision === "warn" ? "warn" : undefined,
60
61
  protect_reason: protectDecision.decision === "warn" ? protectDecision.reason : undefined,
@@ -75,6 +76,7 @@ export function instrumentGoogle(googleModel, options) {
75
76
  request: {
76
77
  endpoint: options.endpoint,
77
78
  feature: options.feature,
79
+ token_explosion_tokens: typeof estimatedInputTokens === "number" ? estimatedInputTokens : undefined,
78
80
  input_tokens_estimate: typeof estimatedInputTokens === "number" ? estimatedInputTokens : undefined,
79
81
  protect_decision: protectDecision.decision === "warn" ? "warn" : undefined,
80
82
  protect_reason: protectDecision.decision === "warn" ? protectDecision.reason : undefined,
@@ -56,6 +56,7 @@ export function instrumentOpenAI(openaiClient, options) {
56
56
  request: {
57
57
  endpoint: options.endpoint,
58
58
  feature: options.feature,
59
+ token_explosion_tokens: typeof estimatedInputTokens === "number" ? estimatedInputTokens : undefined,
59
60
  protect_decision: protectDecision.decision === "warn" ? "warn" : undefined,
60
61
  protect_reason: protectDecision.decision === "warn" ? protectDecision.reason : undefined,
61
62
  },
@@ -75,6 +76,7 @@ export function instrumentOpenAI(openaiClient, options) {
75
76
  request: {
76
77
  endpoint: options.endpoint,
77
78
  feature: options.feature,
79
+ token_explosion_tokens: typeof estimatedInputTokens === "number" ? estimatedInputTokens : undefined,
78
80
  protect_decision: protectDecision.decision === "warn" ? "warn" : undefined,
79
81
  protect_reason: protectDecision.decision === "warn" ? protectDecision.reason : undefined,
80
82
  },
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@rheonic/sdk",
3
- "version": "0.1.0-beta.3",
3
+ "version": "0.1.0-beta.5",
4
4
  "description": "Node.js SDK for Rheonic observability and protect preflight enforcement.",
5
5
  "author": "Rheonic <founder@rheonic.dev>",
6
6
  "license": "MIT",