@fallom/trace 0.1.1 → 0.1.4

This diff shows the published contents of the two publicly available package versions as they appear in their registry. It is provided for informational purposes only.
package/dist/index.mjs CHANGED
@@ -1,25 +1,22 @@
- var __defProp = Object.defineProperty;
- var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
-   get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
- }) : x)(function(x) {
-   if (typeof require !== "undefined") return require.apply(this, arguments);
-   throw Error('Dynamic require of "' + x + '" is not supported');
- });
- var __export = (target, all) => {
-   for (var name in all)
-     __defProp(target, name, { get: all[name], enumerable: true });
- };
+ import {
+   __export,
+   init,
+   prompts_exports
+ } from "./chunk-IGJD7GBO.mjs";

  // src/trace.ts
  var trace_exports = {};
  __export(trace_exports, {
    clearSession: () => clearSession,
    getSession: () => getSession,
-   init: () => init,
+   init: () => init2,
    runWithSession: () => runWithSession,
    setSession: () => setSession,
    shutdown: () => shutdown,
-   span: () => span
+   span: () => span,
+   wrapAnthropic: () => wrapAnthropic,
+   wrapGoogleAI: () => wrapGoogleAI,
+   wrapOpenAI: () => wrapOpenAI
  });
  import { AsyncLocalStorage } from "async_hooks";
  import { NodeSDK } from "@opentelemetry/sdk-node";
@@ -655,16 +652,25 @@ var apiKey = null;
  var baseUrl = "https://spans.fallom.com";
  var initialized = false;
  var captureContent = true;
+ var debugMode = false;
  var sdk = null;
+ function log(...args) {
+   if (debugMode) console.log("[Fallom]", ...args);
+ }
  var fallomSpanProcessor = {
    onStart(span2, _parentContext) {
+     log("\u{1F4CD} Span started:", span2.name || "unknown");
      const ctx = sessionStorage.getStore() || fallbackSession;
      if (ctx) {
        span2.setAttribute("fallom.config_key", ctx.configKey);
        span2.setAttribute("fallom.session_id", ctx.sessionId);
+       log(" Added session context:", ctx.configKey, ctx.sessionId);
+     } else {
+       log(" No session context available");
      }
    },
-   onEnd(_span) {
+   onEnd(span2) {
+     log("\u2705 Span ended:", span2.name, "duration:", span2.duration);
    },
    shutdown() {
      return Promise.resolve();
@@ -673,8 +679,10 @@ var fallomSpanProcessor = {
      return Promise.resolve();
    }
  };
- function init(options = {}) {
+ async function init2(options = {}) {
    if (initialized) return;
+   debugMode = options.debug ?? false;
+   log("\u{1F680} Initializing Fallom tracing...");
    apiKey = options.apiKey || process.env.FALLOM_API_KEY || null;
    baseUrl = options.baseUrl || process.env.FALLOM_BASE_URL || "https://spans.fallom.com";
    const envCapture = process.env.FALLOM_CAPTURE_CONTENT?.toLowerCase();
@@ -689,39 +697,85 @@ function init(options = {}) {
      );
    }
    initialized = true;
+   log("\u{1F4E1} Exporter URL:", `${baseUrl}/v1/traces`);
    const exporter = new OTLPTraceExporter({
      url: `${baseUrl}/v1/traces`,
      headers: {
        Authorization: `Bearer ${apiKey}`
      }
    });
+   const instrumentations = await getInstrumentations();
+   log("\u{1F527} Loaded instrumentations:", instrumentations.length);
    sdk = new NodeSDK({
      resource: new Resource({
        "service.name": "fallom-traced-app"
      }),
      traceExporter: exporter,
-     spanProcessor: fallomSpanProcessor
+     spanProcessor: fallomSpanProcessor,
+     instrumentations
    });
    sdk.start();
-   autoInstrument();
+   log("\u2705 SDK started");
    process.on("SIGTERM", () => {
      sdk?.shutdown().catch(console.error);
    });
  }
- function autoInstrument() {
+ async function getInstrumentations() {
+   const instrumentations = [];
+   await tryAddInstrumentation(
+     instrumentations,
+     "@traceloop/instrumentation-openai",
+     "OpenAIInstrumentation"
+   );
+   await tryAddInstrumentation(
+     instrumentations,
+     "@traceloop/instrumentation-anthropic",
+     "AnthropicInstrumentation"
+   );
+   await tryAddInstrumentation(
+     instrumentations,
+     "@traceloop/instrumentation-cohere",
+     "CohereInstrumentation"
+   );
+   await tryAddInstrumentation(
+     instrumentations,
+     "@traceloop/instrumentation-bedrock",
+     "BedrockInstrumentation"
+   );
+   await tryAddInstrumentation(
+     instrumentations,
+     "@traceloop/instrumentation-google-generativeai",
+     "GoogleGenerativeAIInstrumentation"
+   );
+   await tryAddInstrumentation(
+     instrumentations,
+     "@traceloop/instrumentation-azure",
+     "AzureOpenAIInstrumentation"
+   );
+   await tryAddInstrumentation(
+     instrumentations,
+     "@traceloop/instrumentation-vertexai",
+     "VertexAIInstrumentation"
+   );
+   return instrumentations;
+ }
+ async function tryAddInstrumentation(instrumentations, pkg, className) {
    try {
-     const traceloopModule = __require("@traceloop/node-server-sdk");
-     const Traceloop = traceloopModule.Traceloop || traceloopModule.default?.Traceloop || traceloopModule;
-     if (!Traceloop?.initialize) {
-       return;
+     const mod = await import(pkg);
+     const InstrumentationClass = mod[className] || mod.default?.[className];
+     if (InstrumentationClass) {
+       instrumentations.push(
+         new InstrumentationClass({ traceContent: captureContent })
+       );
+       log(` \u2705 Loaded ${pkg}`);
+     } else {
+       log(
+         ` \u26A0\uFE0F ${pkg} loaded but ${className} not found. Available:`,
+         Object.keys(mod)
+       );
      }
-     Traceloop.initialize({
-       baseUrl,
-       apiKey,
-       disableBatch: true,
-       traceContent: captureContent
-     });
-   } catch {
+   } catch (e) {
+     log(` \u274C ${pkg} not installed`);
    }
  }
  function setSession(configKey, sessionId) {
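
Note on the block above: each instrumentation is optional. getInstrumentations() dynamically imports the listed @traceloop packages and tryAddInstrumentation() silently skips any that are not installed, so an app only pulls in the providers it actually uses. A minimal consumer sketch (the option names are the ones read by init in this version; the values are placeholders, not taken from the package docs):

    // Sketch: init is the package's exported entry point (see src/index.ts further
    // down); it is async in 0.1.4 because instrumentations load via dynamic import.
    import { init } from "@fallom/trace";

    await init({
      apiKey: process.env.FALLOM_API_KEY,   // also read from FALLOM_API_KEY when omitted
      debug: true,                          // enables the new [Fallom] console logging
      captureContent: true                  // prompt/response capture, also via FALLOM_CAPTURE_CONTENT
    });
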
@@ -783,28 +837,235 @@ async function shutdown() {
      initialized = false;
    }
  }
+ async function sendTrace(trace) {
+   try {
+     const controller = new AbortController();
+     const timeoutId = setTimeout(() => controller.abort(), 5e3);
+     await fetch(`${baseUrl}/v1/traces`, {
+       method: "POST",
+       headers: {
+         Authorization: `Bearer ${apiKey}`,
+         "Content-Type": "application/json"
+       },
+       body: JSON.stringify(trace),
+       signal: controller.signal
+     });
+     clearTimeout(timeoutId);
+     log("\u{1F4E4} Trace sent:", trace.name, trace.model);
+   } catch {
+   }
+ }
+ function wrapOpenAI(client) {
+   const originalCreate = client.chat.completions.create.bind(
+     client.chat.completions
+   );
+   client.chat.completions.create = async function(...args) {
+     const ctx = sessionStorage.getStore() || fallbackSession;
+     if (!ctx || !initialized) {
+       return originalCreate(...args);
+     }
+     let promptCtx = null;
+     try {
+       const { getPromptContext } = await import("./prompts-67DJ33I4.mjs");
+       promptCtx = getPromptContext();
+     } catch {
+     }
+     const params = args[0] || {};
+     const startTime = Date.now();
+     try {
+       const response = await originalCreate(...args);
+       const endTime = Date.now();
+       sendTrace({
+         config_key: ctx.configKey,
+         session_id: ctx.sessionId,
+         name: "chat.completions.create",
+         model: response?.model || params?.model,
+         start_time: new Date(startTime).toISOString(),
+         end_time: new Date(endTime).toISOString(),
+         duration_ms: endTime - startTime,
+         status: "OK",
+         prompt_tokens: response?.usage?.prompt_tokens,
+         completion_tokens: response?.usage?.completion_tokens,
+         total_tokens: response?.usage?.total_tokens,
+         input: captureContent ? JSON.stringify(params?.messages) : void 0,
+         output: captureContent ? response?.choices?.[0]?.message?.content : void 0,
+         prompt_key: promptCtx?.promptKey,
+         prompt_version: promptCtx?.promptVersion,
+         prompt_ab_test_key: promptCtx?.abTestKey,
+         prompt_variant_index: promptCtx?.variantIndex
+       }).catch(() => {
+       });
+       return response;
+     } catch (error) {
+       const endTime = Date.now();
+       sendTrace({
+         config_key: ctx.configKey,
+         session_id: ctx.sessionId,
+         name: "chat.completions.create",
+         model: params?.model,
+         start_time: new Date(startTime).toISOString(),
+         end_time: new Date(endTime).toISOString(),
+         duration_ms: endTime - startTime,
+         status: "ERROR",
+         error_message: error?.message,
+         prompt_key: promptCtx?.promptKey,
+         prompt_version: promptCtx?.promptVersion,
+         prompt_ab_test_key: promptCtx?.abTestKey,
+         prompt_variant_index: promptCtx?.variantIndex
+       }).catch(() => {
+       });
+       throw error;
+     }
+   };
+   return client;
+ }
+ function wrapAnthropic(client) {
+   const originalCreate = client.messages.create.bind(client.messages);
+   client.messages.create = async function(...args) {
+     const ctx = sessionStorage.getStore() || fallbackSession;
+     if (!ctx || !initialized) {
+       return originalCreate(...args);
+     }
+     let promptCtx = null;
+     try {
+       const { getPromptContext } = await import("./prompts-67DJ33I4.mjs");
+       promptCtx = getPromptContext();
+     } catch {
+     }
+     const params = args[0] || {};
+     const startTime = Date.now();
+     try {
+       const response = await originalCreate(...args);
+       const endTime = Date.now();
+       sendTrace({
+         config_key: ctx.configKey,
+         session_id: ctx.sessionId,
+         name: "messages.create",
+         model: response?.model || params?.model,
+         start_time: new Date(startTime).toISOString(),
+         end_time: new Date(endTime).toISOString(),
+         duration_ms: endTime - startTime,
+         status: "OK",
+         prompt_tokens: response?.usage?.input_tokens,
+         completion_tokens: response?.usage?.output_tokens,
+         total_tokens: (response?.usage?.input_tokens || 0) + (response?.usage?.output_tokens || 0),
+         input: captureContent ? JSON.stringify(params?.messages) : void 0,
+         output: captureContent ? response?.content?.[0]?.text : void 0,
+         prompt_key: promptCtx?.promptKey,
+         prompt_version: promptCtx?.promptVersion,
+         prompt_ab_test_key: promptCtx?.abTestKey,
+         prompt_variant_index: promptCtx?.variantIndex
+       }).catch(() => {
+       });
+       return response;
+     } catch (error) {
+       const endTime = Date.now();
+       sendTrace({
+         config_key: ctx.configKey,
+         session_id: ctx.sessionId,
+         name: "messages.create",
+         model: params?.model,
+         start_time: new Date(startTime).toISOString(),
+         end_time: new Date(endTime).toISOString(),
+         duration_ms: endTime - startTime,
+         status: "ERROR",
+         error_message: error?.message,
+         prompt_key: promptCtx?.promptKey,
+         prompt_version: promptCtx?.promptVersion,
+         prompt_ab_test_key: promptCtx?.abTestKey,
+         prompt_variant_index: promptCtx?.variantIndex
+       }).catch(() => {
+       });
+       throw error;
+     }
+   };
+   return client;
+ }
+ function wrapGoogleAI(model) {
+   const originalGenerate = model.generateContent.bind(model);
+   model.generateContent = async function(...args) {
+     const ctx = sessionStorage.getStore() || fallbackSession;
+     if (!ctx || !initialized) {
+       return originalGenerate(...args);
+     }
+     let promptCtx = null;
+     try {
+       const { getPromptContext } = await import("./prompts-67DJ33I4.mjs");
+       promptCtx = getPromptContext();
+     } catch {
+     }
+     const startTime = Date.now();
+     try {
+       const response = await originalGenerate(...args);
+       const endTime = Date.now();
+       const result = response?.response;
+       const usage = result?.usageMetadata;
+       sendTrace({
+         config_key: ctx.configKey,
+         session_id: ctx.sessionId,
+         name: "generateContent",
+         model: model?.model || "gemini",
+         start_time: new Date(startTime).toISOString(),
+         end_time: new Date(endTime).toISOString(),
+         duration_ms: endTime - startTime,
+         status: "OK",
+         prompt_tokens: usage?.promptTokenCount,
+         completion_tokens: usage?.candidatesTokenCount,
+         total_tokens: usage?.totalTokenCount,
+         input: captureContent ? JSON.stringify(args[0]) : void 0,
+         output: captureContent ? result?.text?.() : void 0,
+         prompt_key: promptCtx?.promptKey,
+         prompt_version: promptCtx?.promptVersion,
+         prompt_ab_test_key: promptCtx?.abTestKey,
+         prompt_variant_index: promptCtx?.variantIndex
+       }).catch(() => {
+       });
+       return response;
+     } catch (error) {
+       const endTime = Date.now();
+       sendTrace({
+         config_key: ctx.configKey,
+         session_id: ctx.sessionId,
+         name: "generateContent",
+         model: model?.model || "gemini",
+         start_time: new Date(startTime).toISOString(),
+         end_time: new Date(endTime).toISOString(),
+         duration_ms: endTime - startTime,
+         status: "ERROR",
+         error_message: error?.message,
+         prompt_key: promptCtx?.promptKey,
+         prompt_version: promptCtx?.promptVersion,
+         prompt_ab_test_key: promptCtx?.abTestKey,
+         prompt_variant_index: promptCtx?.variantIndex
+       }).catch(() => {
+       });
+       throw error;
+     }
+   };
+   return model;
+ }

  // src/models.ts
  var models_exports = {};
  __export(models_exports, {
    get: () => get,
-   init: () => init2
+   init: () => init3
  });
  import { createHash } from "crypto";
  var apiKey2 = null;
  var baseUrl2 = "https://spans.fallom.com";
  var initialized2 = false;
  var syncInterval = null;
- var debugMode = false;
+ var debugMode2 = false;
  var configCache = /* @__PURE__ */ new Map();
  var SYNC_TIMEOUT = 2e3;
  var RECORD_TIMEOUT = 1e3;
- function log(msg) {
-   if (debugMode) {
+ function log2(msg) {
+   if (debugMode2) {
      console.log(`[Fallom] ${msg}`);
    }
  }
- function init2(options = {}) {
+ function init3(options = {}) {
    apiKey2 = options.apiKey || process.env.FALLOM_API_KEY || null;
    baseUrl2 = options.baseUrl || process.env.FALLOM_BASE_URL || "https://spans.fallom.com";
    initialized2 = true;
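
The wrapOpenAI, wrapAnthropic and wrapGoogleAI helpers added above patch a client's create/generateContent method in place and report each call through sendTrace, reusing the session set via setSession or runWithSession. A usage sketch for the OpenAI wrapper (the openai client, config key, session id and model name are placeholders, not values from this package):

    import OpenAI from "openai";
    import { init, trace } from "@fallom/trace";

    await init({ apiKey: process.env.FALLOM_API_KEY });
    trace.setSession("checkout-flow", "session-123");   // placeholder config key / session id

    const client = trace.wrapOpenAI(new OpenAI());
    const res = await client.chat.completions.create({
      model: "gpt-4o-mini",                              // placeholder model name
      messages: [{ role: "user", content: "Hello" }]
    });
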
@@ -824,18 +1085,18 @@ function init2(options = {}) {
  function ensureInit() {
    if (!initialized2) {
      try {
-       init2();
+       init3();
      } catch {
      }
    }
  }
  async function fetchConfigs(timeout = SYNC_TIMEOUT) {
    if (!apiKey2) {
-     log("_fetchConfigs: No API key, skipping");
+     log2("_fetchConfigs: No API key, skipping");
      return;
    }
    try {
-     log(`Fetching configs from ${baseUrl2}/configs`);
+     log2(`Fetching configs from ${baseUrl2}/configs`);
      const controller = new AbortController();
      const timeoutId = setTimeout(() => controller.abort(), timeout);
      const resp = await fetch(`${baseUrl2}/configs`, {
@@ -843,15 +1104,15 @@ async function fetchConfigs(timeout = SYNC_TIMEOUT) {
        signal: controller.signal
      });
      clearTimeout(timeoutId);
-     log(`Response status: ${resp.status}`);
+     log2(`Response status: ${resp.status}`);
      if (resp.ok) {
        const data = await resp.json();
        const configs = data.configs || [];
-       log(`Got ${configs.length} configs: ${configs.map((c) => c.key)}`);
+       log2(`Got ${configs.length} configs: ${configs.map((c) => c.key)}`);
        for (const c of configs) {
          const key = c.key;
          const version = c.version || 1;
-         log(`Config '${key}' v${version}: ${JSON.stringify(c.variants)}`);
+         log2(`Config '${key}' v${version}: ${JSON.stringify(c.variants)}`);
          if (!configCache.has(key)) {
            configCache.set(key, { versions: /* @__PURE__ */ new Map(), latest: null });
          }
@@ -860,10 +1121,10 @@ async function fetchConfigs(timeout = SYNC_TIMEOUT) {
          cached.latest = version;
        }
      } else {
-       log(`Fetch failed: ${resp.statusText}`);
+       log2(`Fetch failed: ${resp.statusText}`);
      }
    } catch (e) {
-     log(`Fetch exception: ${e}`);
+     log2(`Fetch exception: ${e}`);
    }
  }
  async function fetchSpecificVersion(configKey, version, timeout = SYNC_TIMEOUT) {
@@ -893,20 +1154,20 @@ async function fetchSpecificVersion(configKey, version, timeout = SYNC_TIMEOUT)
  }
  async function get(configKey, sessionId, options = {}) {
    const { version, fallback, debug = false } = options;
-   debugMode = debug;
+   debugMode2 = debug;
    ensureInit();
-   log(`get() called: configKey=${configKey}, sessionId=${sessionId}, fallback=${fallback}`);
+   log2(`get() called: configKey=${configKey}, sessionId=${sessionId}, fallback=${fallback}`);
    try {
      let configData = configCache.get(configKey);
-     log(`Cache lookup for '${configKey}': ${configData ? "found" : "not found"}`);
+     log2(`Cache lookup for '${configKey}': ${configData ? "found" : "not found"}`);
      if (!configData) {
-       log("Not in cache, fetching...");
+       log2("Not in cache, fetching...");
        await fetchConfigs(SYNC_TIMEOUT);
        configData = configCache.get(configKey);
-       log(`After fetch, cache lookup: ${configData ? "found" : "still not found"}`);
+       log2(`After fetch, cache lookup: ${configData ? "found" : "still not found"}`);
      }
      if (!configData) {
-       log(`Config not found, using fallback: ${fallback}`);
+       log2(`Config not found, using fallback: ${fallback}`);
        if (fallback) {
          console.warn(`[Fallom WARNING] Config '${configKey}' not found, using fallback model: ${fallback}`);
          return returnWithTrace(configKey, sessionId, fallback, 0);
@@ -944,22 +1205,22 @@ async function get(configKey, sessionId, options = {}) {
      const variantsRaw = config.variants;
      const configVersion = config.version || targetVersion;
      const variants = Array.isArray(variantsRaw) ? variantsRaw : Object.values(variantsRaw);
-     log(`Config found! Version: ${configVersion}, Variants: ${JSON.stringify(variants)}`);
+     log2(`Config found! Version: ${configVersion}, Variants: ${JSON.stringify(variants)}`);
      const hashBytes = createHash("md5").update(sessionId).digest();
      const hashVal = hashBytes.readUInt32BE(0) % 1e6;
-     log(`Session hash: ${hashVal} (out of 1,000,000)`);
+     log2(`Session hash: ${hashVal} (out of 1,000,000)`);
      let cumulative = 0;
      let assignedModel = variants[variants.length - 1].model;
      for (const v of variants) {
        const oldCumulative = cumulative;
        cumulative += v.weight * 1e4;
-       log(`Variant ${v.model}: weight=${v.weight}%, range=${oldCumulative}-${cumulative}, hash=${hashVal}, match=${hashVal < cumulative}`);
+       log2(`Variant ${v.model}: weight=${v.weight}%, range=${oldCumulative}-${cumulative}, hash=${hashVal}, match=${hashVal < cumulative}`);
        if (hashVal < cumulative) {
          assignedModel = v.model;
          break;
        }
      }
-     log(`\u2705 Assigned model: ${assignedModel}`);
+     log2(`\u2705 Assigned model: ${assignedModel}`);
      return returnWithTrace(configKey, sessionId, assignedModel, configVersion);
    } catch (e) {
      if (e instanceof Error && e.message.includes("not found")) {
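
The assignment in this hunk is deterministic per session: the MD5 hash of the session id is reduced to a bucket in [0, 1000000) and compared against cumulative variant weights, where weights are percentages and each percentage point covers 10,000 buckets (weight * 1e4). A worked sketch with hypothetical variants:

    import { createHash } from "crypto";

    // same bucketing as get() above; "session-123" is a placeholder session id
    const bucket = createHash("md5").update("session-123").digest().readUInt32BE(0) % 1e6;

    // with variants [{ model: "model-a", weight: 10 }, { model: "model-b", weight: 90 }]:
    //   bucket < 100000  -> "model-a"   (10 * 1e4)
    //   bucket >= 100000 -> "model-b"
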
@@ -1008,14 +1269,19 @@ async function recordSession(configKey, version, sessionId, model) {
  }

  // src/init.ts
- function init3(options = {}) {
+ async function init4(options = {}) {
    const baseUrl3 = options.baseUrl || process.env.FALLOM_BASE_URL || "https://spans.fallom.com";
-   init({
+   await init2({
      apiKey: options.apiKey,
      baseUrl: baseUrl3,
-     captureContent: options.captureContent
+     captureContent: options.captureContent,
+     debug: options.debug
    });
-   init2({
+   init3({
+     apiKey: options.apiKey,
+     baseUrl: baseUrl3
+   });
+   init({
      apiKey: options.apiKey,
      baseUrl: baseUrl3
    });
@@ -1023,13 +1289,15 @@ function init3(options = {}) {

  // src/index.ts
  var index_default = {
-   init: init3,
+   init: init4,
    trace: trace_exports,
-   models: models_exports
+   models: models_exports,
+   prompts: prompts_exports
  };
  export {
    index_default as default,
-   init3 as init,
+   init4 as init,
    models_exports as models,
+   prompts_exports as prompts,
    trace_exports as trace
  };
@@ -0,0 +1,14 @@
+ import {
+   clearPromptContext,
+   get,
+   getAB,
+   getPromptContext,
+   init
+ } from "./chunk-IGJD7GBO.mjs";
+ export {
+   clearPromptContext,
+   get,
+   getAB,
+   getPromptContext,
+   init
+ };
@@ -0,0 +1,14 @@
+ import {
+   clearPromptContext,
+   get,
+   getAB,
+   getPromptContext,
+   init
+ } from "./chunk-VNUUS74T.mjs";
+ export {
+   clearPromptContext,
+   get,
+   getAB,
+   getPromptContext,
+   init
+ };
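
The two files added above re-export the same prompt helpers from the bundled chunks; their signatures are not part of this diff, only the exported names. Through the package root they surface as the new prompts entry, so a consumer could reach them with something like the following (a sketch, assuming prompts_exports carries these same helpers):

    import { prompts } from "@fallom/trace";
    const { get, getAB, getPromptContext, clearPromptContext } = prompts;
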
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@fallom/trace",
-   "version": "0.1.1",
+   "version": "0.1.4",
    "description": "Model A/B testing and tracing for LLM applications. Zero latency, production-ready.",
    "main": "./dist/index.js",
    "module": "./dist/index.mjs",
@@ -20,7 +20,10 @@
    "dev": "tsup src/index.ts --format cjs,esm --dts --watch",
    "test": "vitest",
    "lint": "eslint src/",
-   "prepublishOnly": "npm run build"
+   "prepublishOnly": "npm run build",
+   "publish:patch": "npm version patch && npm publish --access public",
+   "publish:minor": "npm version minor && npm publish --access public",
+   "publish:major": "npm version major && npm publish --access public"
  },
  "keywords": [
    "llm",
@@ -44,7 +47,7 @@
    "@opentelemetry/sdk-node": "^0.46.0",
    "@opentelemetry/sdk-trace-node": "^1.19.0",
    "@opentelemetry/exporter-trace-otlp-http": "^0.46.0",
-   "@traceloop/node-server-sdk": "^0.5.0",
+   "@traceloop/instrumentation-openai": "^0.11.0",
    "tslib": "^2.6.0"
  },
  "devDependencies": {