observa-sdk 0.0.17 → 0.0.18

package/dist/index.cjs CHANGED
@@ -1104,6 +1104,10 @@ var Observa = class {
  spanStack = [];
  // Stack for tracking parent-child relationships
  traceStartTime = null;
+ // Track traces with errors (for automatic trace_end generation when using instrumentation)
+ tracesWithErrors = /* @__PURE__ */ new Set();
+ // Track root span IDs for traces (for automatic trace_end generation)
+ traceRootSpanIds = /* @__PURE__ */ new Map();
  constructor(config) {
  this.apiKey = config.apiKey;
  let apiUrlEnv;
@@ -1194,9 +1198,13 @@ var Observa = class {
  addEvent(eventData) {
  const baseProps = this.createBaseEventProperties();
  const parentSpanId = this.spanStack.length > 0 ? this.spanStack[this.spanStack.length - 1] : null;
+ const spanId = eventData.span_id || crypto.randomUUID();
+ if (!this.currentTraceId && !this.traceRootSpanIds.has(baseProps.trace_id)) {
+ this.traceRootSpanIds.set(baseProps.trace_id, spanId);
+ }
  const event = {
  ...baseProps,
- span_id: eventData.span_id || crypto.randomUUID(),
+ span_id: spanId,
  parent_span_id: (eventData.parent_span_id !== void 0 ? eventData.parent_span_id : parentSpanId) ?? null,
  timestamp: eventData.timestamp || (/* @__PURE__ */ new Date()).toISOString(),
  event_type: eventData.event_type,
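
For orientation, the two new instance fields and the `addEvent` change above amount to lightweight per-trace bookkeeping: a set of trace IDs that saw an error, and a map from trace ID to the span ID of the first event recorded for an implicitly created trace. A minimal TypeScript sketch of the same logic (field names mirror the bundled code; the standalone helper is illustrative, not an exported API):

```typescript
// Illustrative sketch of the per-trace bookkeeping added in 0.0.18.

// Trace IDs that had at least one error event; the flush path uses this
// to set the synthesized trace_end outcome to "error".
const tracesWithErrors = new Set<string>();

// trace_id -> span_id of the first event seen for an implicit trace; the
// flush path reuses it as the span_id of the synthesized trace_start/trace_end.
const traceRootSpanIds = new Map<string, string>();

// Mirrors the addEvent change: only when no explicit trace is active
// (currentTraceId is null) and no root span has been recorded yet does the
// incoming event's span ID become the trace's root span.
function rememberRootSpan(
  currentTraceId: string | null,
  traceId: string,
  spanId: string
): void {
  if (!currentTraceId && !traceRootSpanIds.has(traceId)) {
    traceRootSpanIds.set(traceId, spanId);
  }
}
```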
@@ -1381,6 +1389,8 @@ var Observa = class {
  if (!stackTrace && options.error instanceof Error && options.error.stack) {
  stackTrace = options.error.stack;
  }
+ const baseProps = this.createBaseEventProperties();
+ this.tracesWithErrors.add(baseProps.trace_id);
  this.addEvent({
  event_type: "error",
  span_id: spanId,
@@ -1748,6 +1758,65 @@ var Observa = class {
  eventsByTrace.get(event.trace_id).push(event);
  }
  for (const [traceId, events] of eventsByTrace.entries()) {
+ const hasTraceStart = events.some((e) => e.event_type === "trace_start");
+ const hasTraceEnd = events.some((e) => e.event_type === "trace_end");
+ const hasError = this.tracesWithErrors.has(traceId);
+ const rootSpanId = this.traceRootSpanIds.get(traceId) || events[0]?.span_id || crypto.randomUUID();
+ const firstEvent = events[0];
+ if (!firstEvent) continue;
+ if (!hasTraceStart) {
+ const traceStartEvent = {
+ tenant_id: firstEvent.tenant_id,
+ project_id: firstEvent.project_id,
+ environment: firstEvent.environment,
+ trace_id: traceId,
+ span_id: rootSpanId,
+ parent_span_id: null,
+ timestamp: firstEvent.timestamp,
+ event_type: "trace_start",
+ attributes: {
+ trace_start: {
+ name: null,
+ metadata: null
+ }
+ }
+ };
+ events.unshift(traceStartEvent);
+ }
+ if (!hasTraceEnd) {
+ const llmEvents = events.filter((e) => e.event_type === "llm_call");
+ const totalTokens = llmEvents.reduce(
+ (sum, e) => sum + (e.attributes.llm_call?.total_tokens || 0),
+ 0
+ );
+ const totalCost = llmEvents.reduce(
+ (sum, e) => sum + (e.attributes.llm_call?.cost || 0),
+ 0
+ );
+ const timestamps = events.map((e) => new Date(e.timestamp).getTime()).filter(Boolean);
+ const totalLatency = timestamps.length > 0 ? Math.max(...timestamps) - Math.min(...timestamps) : null;
+ const traceEndEvent = {
+ tenant_id: firstEvent.tenant_id,
+ project_id: firstEvent.project_id,
+ environment: firstEvent.environment,
+ trace_id: traceId,
+ span_id: rootSpanId,
+ parent_span_id: null,
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+ event_type: "trace_end",
+ attributes: {
+ trace_end: {
+ total_latency_ms: totalLatency,
+ total_tokens: totalTokens || null,
+ total_cost: totalCost || null,
+ outcome: hasError ? "error" : "success"
+ }
+ }
+ };
+ events.push(traceEndEvent);
+ }
+ this.tracesWithErrors.delete(traceId);
+ this.traceRootSpanIds.delete(traceId);
  await this._sendEventsWithRetry(events);
  }
  }
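
The practical effect of this hunk is that traces produced solely through the instrumentation wrappers now arrive complete: on flush, any trace missing trace_start/trace_end has them synthesized, with tokens and cost summed from its llm_call events, latency taken as the spread between the earliest and latest event timestamps, and the outcome set to "error" if an error event was recorded for that trace. A hedged consumer-side sketch follows; the `init` entry point and `observeVercelAI` usage come from the SDK's own JSDoc, while the import path and the explicit `flush()` call are assumptions about the public surface:

```typescript
import { generateText } from 'ai';
// Assumed import path; the SDK's JSDoc shows `const observa = init({ apiKey: '...' })`.
import { init } from 'observa-sdk';

async function main() {
  const observa = init({ apiKey: process.env.OBSERVA_API_KEY ?? '' });

  // Wrap the Vercel AI SDK as in the documented example; wrapped calls emit
  // llm_call (and error) events without any explicit trace_start/trace_end
  // calls from the application.
  const ai = observa.observeVercelAI({ generateText }, { name: 'release-notes-demo' });

  const result = await ai.generateText({
    model: 'openai/gpt-4',
    prompt: 'Summarize what changed in observa-sdk 0.0.18',
  });
  console.log(result.text);

  // As of 0.0.18, flushing groups buffered events by trace_id and synthesizes
  // trace_start/trace_end for traces that lack them, so the backend receives a
  // closed trace with total_tokens, total_cost, total_latency_ms and an outcome.
  await observa.flush(); // assumed method name, per the "Flush buffered events" declaration
}

main().catch(console.error);
```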
@@ -1789,11 +1858,11 @@ var Observa = class {
  }
  /**
  * Observe OpenAI client - wraps client with automatic tracing
- *
+ *
  * @param client - OpenAI client instance
  * @param options - Observation options (name, tags, userId, sessionId, redact)
  * @returns Wrapped OpenAI client
- *
+ *
  * @example
  * ```typescript
  * import OpenAI from 'openai';
@@ -1814,11 +1883,11 @@ var Observa = class {
  }
  /**
  * Observe Anthropic client - wraps client with automatic tracing
- *
+ *
  * @param client - Anthropic client instance
  * @param options - Observation options (name, tags, userId, sessionId, redact)
  * @returns Wrapped Anthropic client
- *
+ *
  * @example
  * ```typescript
  * import Anthropic from '@anthropic-ai/sdk';
@@ -1839,21 +1908,21 @@ var Observa = class {
  }
  /**
  * Observe Vercel AI SDK - wraps generateText and streamText functions
- *
+ *
  * @param aiSdk - Vercel AI SDK module (imported from 'ai')
  * @param options - Observation options (name, tags, userId, sessionId, redact)
  * @returns Wrapped AI SDK with automatic tracing
- *
+ *
  * @example
  * ```typescript
  * import { generateText, streamText } from 'ai';
  * const observa = init({ apiKey: '...' });
- *
+ *
  * const ai = observa.observeVercelAI({ generateText, streamText }, {
  * name: 'my-app',
  * redact: (data) => ({ ...data, prompt: '[REDACTED]' })
  * });
- *
+ *
  * // Use wrapped functions - automatically tracked!
  * const result = await ai.generateText({
  * model: 'openai/gpt-4',
package/dist/index.d.cts CHANGED
@@ -38,6 +38,8 @@ declare class Observa {
  private rootSpanId;
  private spanStack;
  private traceStartTime;
+ private tracesWithErrors;
+ private traceRootSpanIds;
  constructor(config: ObservaInitConfig);
  /**
  * Flush buffered events to the API
package/dist/index.d.ts CHANGED
@@ -38,6 +38,8 @@ declare class Observa {
  private rootSpanId;
  private spanStack;
  private traceStartTime;
+ private tracesWithErrors;
+ private traceRootSpanIds;
  constructor(config: ObservaInitConfig);
  /**
  * Flush buffered events to the API
package/dist/index.js CHANGED
@@ -1084,6 +1084,10 @@ var Observa = class {
  spanStack = [];
  // Stack for tracking parent-child relationships
  traceStartTime = null;
+ // Track traces with errors (for automatic trace_end generation when using instrumentation)
+ tracesWithErrors = /* @__PURE__ */ new Set();
+ // Track root span IDs for traces (for automatic trace_end generation)
+ traceRootSpanIds = /* @__PURE__ */ new Map();
  constructor(config) {
  this.apiKey = config.apiKey;
  let apiUrlEnv;
@@ -1174,9 +1178,13 @@ var Observa = class {
  addEvent(eventData) {
  const baseProps = this.createBaseEventProperties();
  const parentSpanId = this.spanStack.length > 0 ? this.spanStack[this.spanStack.length - 1] : null;
+ const spanId = eventData.span_id || crypto.randomUUID();
+ if (!this.currentTraceId && !this.traceRootSpanIds.has(baseProps.trace_id)) {
+ this.traceRootSpanIds.set(baseProps.trace_id, spanId);
+ }
  const event = {
  ...baseProps,
- span_id: eventData.span_id || crypto.randomUUID(),
+ span_id: spanId,
  parent_span_id: (eventData.parent_span_id !== void 0 ? eventData.parent_span_id : parentSpanId) ?? null,
  timestamp: eventData.timestamp || (/* @__PURE__ */ new Date()).toISOString(),
  event_type: eventData.event_type,
@@ -1361,6 +1369,8 @@ var Observa = class {
  if (!stackTrace && options.error instanceof Error && options.error.stack) {
  stackTrace = options.error.stack;
  }
+ const baseProps = this.createBaseEventProperties();
+ this.tracesWithErrors.add(baseProps.trace_id);
  this.addEvent({
  event_type: "error",
  span_id: spanId,
@@ -1728,6 +1738,65 @@ var Observa = class {
  eventsByTrace.get(event.trace_id).push(event);
  }
  for (const [traceId, events] of eventsByTrace.entries()) {
+ const hasTraceStart = events.some((e) => e.event_type === "trace_start");
+ const hasTraceEnd = events.some((e) => e.event_type === "trace_end");
+ const hasError = this.tracesWithErrors.has(traceId);
+ const rootSpanId = this.traceRootSpanIds.get(traceId) || events[0]?.span_id || crypto.randomUUID();
+ const firstEvent = events[0];
+ if (!firstEvent) continue;
+ if (!hasTraceStart) {
+ const traceStartEvent = {
+ tenant_id: firstEvent.tenant_id,
+ project_id: firstEvent.project_id,
+ environment: firstEvent.environment,
+ trace_id: traceId,
+ span_id: rootSpanId,
+ parent_span_id: null,
+ timestamp: firstEvent.timestamp,
+ event_type: "trace_start",
+ attributes: {
+ trace_start: {
+ name: null,
+ metadata: null
+ }
+ }
+ };
+ events.unshift(traceStartEvent);
+ }
+ if (!hasTraceEnd) {
+ const llmEvents = events.filter((e) => e.event_type === "llm_call");
+ const totalTokens = llmEvents.reduce(
+ (sum, e) => sum + (e.attributes.llm_call?.total_tokens || 0),
+ 0
+ );
+ const totalCost = llmEvents.reduce(
+ (sum, e) => sum + (e.attributes.llm_call?.cost || 0),
+ 0
+ );
+ const timestamps = events.map((e) => new Date(e.timestamp).getTime()).filter(Boolean);
+ const totalLatency = timestamps.length > 0 ? Math.max(...timestamps) - Math.min(...timestamps) : null;
+ const traceEndEvent = {
+ tenant_id: firstEvent.tenant_id,
+ project_id: firstEvent.project_id,
+ environment: firstEvent.environment,
+ trace_id: traceId,
+ span_id: rootSpanId,
+ parent_span_id: null,
+ timestamp: (/* @__PURE__ */ new Date()).toISOString(),
+ event_type: "trace_end",
+ attributes: {
+ trace_end: {
+ total_latency_ms: totalLatency,
+ total_tokens: totalTokens || null,
+ total_cost: totalCost || null,
+ outcome: hasError ? "error" : "success"
+ }
+ }
+ };
+ events.push(traceEndEvent);
+ }
+ this.tracesWithErrors.delete(traceId);
+ this.traceRootSpanIds.delete(traceId);
  await this._sendEventsWithRetry(events);
  }
  }
@@ -1769,11 +1838,11 @@ var Observa = class {
  }
  /**
  * Observe OpenAI client - wraps client with automatic tracing
- *
+ *
  * @param client - OpenAI client instance
  * @param options - Observation options (name, tags, userId, sessionId, redact)
  * @returns Wrapped OpenAI client
- *
+ *
  * @example
  * ```typescript
  * import OpenAI from 'openai';
@@ -1794,11 +1863,11 @@ var Observa = class {
  }
  /**
  * Observe Anthropic client - wraps client with automatic tracing
- *
+ *
  * @param client - Anthropic client instance
  * @param options - Observation options (name, tags, userId, sessionId, redact)
  * @returns Wrapped Anthropic client
- *
+ *
  * @example
  * ```typescript
  * import Anthropic from '@anthropic-ai/sdk';
@@ -1819,21 +1888,21 @@ var Observa = class {
  }
  /**
  * Observe Vercel AI SDK - wraps generateText and streamText functions
- *
+ *
  * @param aiSdk - Vercel AI SDK module (imported from 'ai')
  * @param options - Observation options (name, tags, userId, sessionId, redact)
  * @returns Wrapped AI SDK with automatic tracing
- *
+ *
  * @example
  * ```typescript
  * import { generateText, streamText } from 'ai';
  * const observa = init({ apiKey: '...' });
- *
+ *
  * const ai = observa.observeVercelAI({ generateText, streamText }, {
  * name: 'my-app',
  * redact: (data) => ({ ...data, prompt: '[REDACTED]' })
  * });
- *
+ *
  * // Use wrapped functions - automatically tracked!
  * const result = await ai.generateText({
  * model: 'openai/gpt-4',
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "observa-sdk",
- "version": "0.0.17",
+ "version": "0.0.18",
  "description": "Enterprise-grade observability SDK for AI applications. Track and monitor LLM interactions with zero friction.",
  "type": "module",
  "main": "./dist/index.cjs",