scorecard-ai 2.6.0 → 3.0.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +39 -0
- package/LICENSE +1 -1
- package/README.md +9 -0
- package/index.d.mts +1 -0
- package/index.d.mts.map +1 -1
- package/index.d.ts +1 -0
- package/index.d.ts.map +1 -1
- package/index.js +5 -1
- package/index.js.map +1 -1
- package/index.mjs +1 -0
- package/index.mjs.map +1 -1
- package/lib/wrapLLMs.d.mts +63 -0
- package/lib/wrapLLMs.d.mts.map +1 -0
- package/lib/wrapLLMs.d.ts +63 -0
- package/lib/wrapLLMs.d.ts.map +1 -0
- package/lib/wrapLLMs.js +386 -0
- package/lib/wrapLLMs.js.map +1 -0
- package/lib/wrapLLMs.mjs +382 -0
- package/lib/wrapLLMs.mjs.map +1 -0
- package/package.json +1 -1
- package/src/index.ts +1 -0
- package/src/lib/wrapLLMs.ts +485 -0
- package/src/version.ts +1 -1
- package/version.d.mts +1 -1
- package/version.d.mts.map +1 -1
- package/version.d.ts +1 -1
- package/version.d.ts.map +1 -1
- package/version.js +1 -1
- package/version.js.map +1 -1
- package/version.mjs +1 -1
- package/version.mjs.map +1 -1
package/CHANGELOG.md
CHANGED
|
@@ -1,5 +1,44 @@
|
|
|
1
1
|
# Changelog
|
|
2
2
|
|
|
3
|
+
## 3.0.0-beta.0 (2026-01-14)
|
|
4
|
+
|
|
5
|
+
Full Changelog: [v2.6.0...v3.0.0-beta.0](https://github.com/scorecard-ai/scorecard-node/compare/v2.6.0...v3.0.0-beta.0)
|
|
6
|
+
|
|
7
|
+
### ⚠ BREAKING CHANGES
|
|
8
|
+
|
|
9
|
+
* **mcp:** remove deprecated tool schemes
|
|
10
|
+
* **mcp:** **Migration:** To migrate, simply modify the command used to invoke the MCP server. Currently, the only supported tool scheme is code mode. Now, starting the server with just `node /path/to/mcp/server` or `npx package-name` will invoke code tools: changing your command to one of these is likely all you will need to do.
|
|
11
|
+
|
|
12
|
+
### Features
|
|
13
|
+
|
|
14
|
+
* Add telemetry wrappers around Anthropic and OpenAI SDKs ([#31](https://github.com/scorecard-ai/scorecard-node/issues/31)) ([bb4dfe5](https://github.com/scorecard-ai/scorecard-node/commit/bb4dfe590993bfb07d28a1db96622f86654366d6))
|
|
15
|
+
* **mcp:** Use Scorecard API key from MCP client when executing code tool ([741fbdd](https://github.com/scorecard-ai/scorecard-node/commit/741fbdd38c9becc69f4e7a6de845dcff556434df))
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
### Bug Fixes
|
|
19
|
+
|
|
20
|
+
* **mcp:** correct code tool api output types ([c3983d7](https://github.com/scorecard-ai/scorecard-node/commit/c3983d734ccbe019061105f99ba6b2b8727e8b0f))
|
|
21
|
+
* **mcp:** fix env parsing ([22c7368](https://github.com/scorecard-ai/scorecard-node/commit/22c73681d18660d972a9836fe20926b5d402b534))
|
|
22
|
+
* **mcp:** fix options parsing ([c19c6e3](https://github.com/scorecard-ai/scorecard-node/commit/c19c6e3a2da944a83b55419acb2c72ae9a1e1f9d))
|
|
23
|
+
* **mcp:** pass base url to code tool ([2a855d6](https://github.com/scorecard-ai/scorecard-node/commit/2a855d651ec9260455d9b1172cf543f9a334c6b8))
|
|
24
|
+
* **mcp:** update code tool prompt ([cec30f5](https://github.com/scorecard-ai/scorecard-node/commit/cec30f5761b9af71a10bdbe0a5aacff277c4a04b))
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
### Chores
|
|
28
|
+
|
|
29
|
+
* break long lines in snippets into multiline ([6eb4de7](https://github.com/scorecard-ai/scorecard-node/commit/6eb4de7c08e0f14041714c8fb93aba4b1ab8de1a))
|
|
30
|
+
* **internal:** codegen related update ([e1a1b97](https://github.com/scorecard-ai/scorecard-node/commit/e1a1b97a6516933282ba19368dfcd4abb6adecb6))
|
|
31
|
+
* **internal:** codegen related update ([5c31e8f](https://github.com/scorecard-ai/scorecard-node/commit/5c31e8f81cbc97a6ade6567e9342231b79333eb9))
|
|
32
|
+
* **internal:** codegen related update ([d638ee7](https://github.com/scorecard-ai/scorecard-node/commit/d638ee7d5c8d6c7a61e98f261a4aec42f57d1202))
|
|
33
|
+
* **internal:** codegen related update ([8a98749](https://github.com/scorecard-ai/scorecard-node/commit/8a987491b973353fb77457075e3dbbd5d462c8bc))
|
|
34
|
+
* **internal:** codegen related update ([32953e6](https://github.com/scorecard-ai/scorecard-node/commit/32953e6cf9066fb0952863950cd63575bf52435c))
|
|
35
|
+
* **mcp:** remove deprecated tool schemes ([7175afa](https://github.com/scorecard-ai/scorecard-node/commit/7175afa508ca459c54ade311ecb970b316c1cc87))
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
### Documentation
|
|
39
|
+
|
|
40
|
+
* prominently feature MCP server setup in root SDK readmes ([2c59b3f](https://github.com/scorecard-ai/scorecard-node/commit/2c59b3f39f3d14b27b2f13f6183437446c25bb54))
|
|
41
|
+
|
|
3
42
|
## 2.6.0 (2025-12-11)
|
|
4
43
|
|
|
5
44
|
Full Changelog: [v2.5.0...v2.6.0](https://github.com/scorecard-ai/scorecard-node/compare/v2.5.0...v2.6.0)
|
package/LICENSE
CHANGED
|
@@ -186,7 +186,7 @@
|
|
|
186
186
|
same "printed page" as the copyright notice for easier
|
|
187
187
|
identification within third-party archives.
|
|
188
188
|
|
|
189
|
-
Copyright
|
|
189
|
+
Copyright 2026 Scorecard
|
|
190
190
|
|
|
191
191
|
Licensed under the Apache License, Version 2.0 (the "License");
|
|
192
192
|
you may not use this file except in compliance with the License.
|
package/README.md
CHANGED
|
@@ -8,6 +8,15 @@ The REST API documentation can be found on [docs.scorecard.io](https://docs.scor
|
|
|
8
8
|
|
|
9
9
|
It is generated with [Stainless](https://www.stainless.com/).
|
|
10
10
|
|
|
11
|
+
## MCP Server
|
|
12
|
+
|
|
13
|
+
Use the Scorecard MCP Server to enable AI assistants to interact with this API, allowing them to explore endpoints, make test requests, and use documentation to help integrate this SDK into your application.
|
|
14
|
+
|
|
15
|
+
[](https://cursor.com/en-US/install-mcp?name=scorecard-ai-mcp&config=eyJjb21tYW5kIjoibnB4IiwiYXJncyI6WyIteSIsInNjb3JlY2FyZC1haS1tY3AiXX0)
|
|
16
|
+
[](https://vscode.stainless.com/mcp/%7B%22name%22%3A%22scorecard-ai-mcp%22%2C%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22scorecard-ai-mcp%22%5D%7D)
|
|
17
|
+
|
|
18
|
+
> Note: You may need to set environment variables in your MCP client.
|
|
19
|
+
|
|
11
20
|
## Installation
|
|
12
21
|
|
|
13
22
|
```sh
|
package/index.d.mts
CHANGED
|
@@ -6,4 +6,5 @@ export { PagePromise } from "./core/pagination.mjs";
|
|
|
6
6
|
export { ScorecardError, APIError, APIConnectionError, APIConnectionTimeoutError, APIUserAbortError, NotFoundError, ConflictError, RateLimitError, BadRequestError, AuthenticationError, InternalServerError, PermissionDeniedError, UnprocessableEntityError, } from "./core/error.mjs";
|
|
7
7
|
export { runAndEvaluate } from "./lib/runAndEvaluate.mjs";
|
|
8
8
|
export { wrapAISDK } from "./lib/wrapAISDK.mjs";
|
|
9
|
+
export { wrap, wrapOpenAI, wrapAnthropic } from "./lib/wrapLLMs.mjs";
|
|
9
10
|
//# sourceMappingURL=index.d.mts.map
|
package/index.d.mts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.mts","sourceRoot":"","sources":["src/index.ts"],"names":[],"mappings":"OAEO,EAAE,SAAS,IAAI,OAAO,EAAE;OAExB,EAAE,KAAK,UAAU,EAAE,MAAM,EAAE;OAC3B,EAAE,UAAU,EAAE;OACd,EAAE,SAAS,EAAE,KAAK,aAAa,EAAE;OACjC,EAAE,WAAW,EAAE;OACf,EACL,cAAc,EACd,QAAQ,EACR,kBAAkB,EAClB,yBAAyB,EACzB,iBAAiB,EACjB,aAAa,EACb,aAAa,EACb,cAAc,EACd,eAAe,EACf,mBAAmB,EACnB,mBAAmB,EACnB,qBAAqB,EACrB,wBAAwB,GACzB;OAEM,EAAE,cAAc,EAAE;OAClB,EAAE,SAAS,EAAE"}
|
|
1
|
+
{"version":3,"file":"index.d.mts","sourceRoot":"","sources":["src/index.ts"],"names":[],"mappings":"OAEO,EAAE,SAAS,IAAI,OAAO,EAAE;OAExB,EAAE,KAAK,UAAU,EAAE,MAAM,EAAE;OAC3B,EAAE,UAAU,EAAE;OACd,EAAE,SAAS,EAAE,KAAK,aAAa,EAAE;OACjC,EAAE,WAAW,EAAE;OACf,EACL,cAAc,EACd,QAAQ,EACR,kBAAkB,EAClB,yBAAyB,EACzB,iBAAiB,EACjB,aAAa,EACb,aAAa,EACb,cAAc,EACd,eAAe,EACf,mBAAmB,EACnB,mBAAmB,EACnB,qBAAqB,EACrB,wBAAwB,GACzB;OAEM,EAAE,cAAc,EAAE;OAClB,EAAE,SAAS,EAAE;OACb,EAAE,IAAI,EAAE,UAAU,EAAE,aAAa,EAAE"}
|
package/index.d.ts
CHANGED
|
@@ -6,4 +6,5 @@ export { PagePromise } from "./core/pagination.js";
|
|
|
6
6
|
export { ScorecardError, APIError, APIConnectionError, APIConnectionTimeoutError, APIUserAbortError, NotFoundError, ConflictError, RateLimitError, BadRequestError, AuthenticationError, InternalServerError, PermissionDeniedError, UnprocessableEntityError, } from "./core/error.js";
|
|
7
7
|
export { runAndEvaluate } from "./lib/runAndEvaluate.js";
|
|
8
8
|
export { wrapAISDK } from "./lib/wrapAISDK.js";
|
|
9
|
+
export { wrap, wrapOpenAI, wrapAnthropic } from "./lib/wrapLLMs.js";
|
|
9
10
|
//# sourceMappingURL=index.d.ts.map
|
package/index.d.ts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["src/index.ts"],"names":[],"mappings":"OAEO,EAAE,SAAS,IAAI,OAAO,EAAE;OAExB,EAAE,KAAK,UAAU,EAAE,MAAM,EAAE;OAC3B,EAAE,UAAU,EAAE;OACd,EAAE,SAAS,EAAE,KAAK,aAAa,EAAE;OACjC,EAAE,WAAW,EAAE;OACf,EACL,cAAc,EACd,QAAQ,EACR,kBAAkB,EAClB,yBAAyB,EACzB,iBAAiB,EACjB,aAAa,EACb,aAAa,EACb,cAAc,EACd,eAAe,EACf,mBAAmB,EACnB,mBAAmB,EACnB,qBAAqB,EACrB,wBAAwB,GACzB;OAEM,EAAE,cAAc,EAAE;OAClB,EAAE,SAAS,EAAE"}
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["src/index.ts"],"names":[],"mappings":"OAEO,EAAE,SAAS,IAAI,OAAO,EAAE;OAExB,EAAE,KAAK,UAAU,EAAE,MAAM,EAAE;OAC3B,EAAE,UAAU,EAAE;OACd,EAAE,SAAS,EAAE,KAAK,aAAa,EAAE;OACjC,EAAE,WAAW,EAAE;OACf,EACL,cAAc,EACd,QAAQ,EACR,kBAAkB,EAClB,yBAAyB,EACzB,iBAAiB,EACjB,aAAa,EACb,aAAa,EACb,cAAc,EACd,eAAe,EACf,mBAAmB,EACnB,mBAAmB,EACnB,qBAAqB,EACrB,wBAAwB,GACzB;OAEM,EAAE,cAAc,EAAE;OAClB,EAAE,SAAS,EAAE;OACb,EAAE,IAAI,EAAE,UAAU,EAAE,aAAa,EAAE"}
|
package/index.js
CHANGED
|
@@ -4,7 +4,7 @@ exports = module.exports = function (...args) {
|
|
|
4
4
|
return new exports.default(...args)
|
|
5
5
|
}
|
|
6
6
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
7
|
-
exports.wrapAISDK = exports.runAndEvaluate = exports.UnprocessableEntityError = exports.PermissionDeniedError = exports.InternalServerError = exports.AuthenticationError = exports.BadRequestError = exports.RateLimitError = exports.ConflictError = exports.NotFoundError = exports.APIUserAbortError = exports.APIConnectionTimeoutError = exports.APIConnectionError = exports.APIError = exports.ScorecardError = exports.PagePromise = exports.Scorecard = exports.APIPromise = exports.toFile = exports.default = void 0;
|
|
7
|
+
exports.wrapAnthropic = exports.wrapOpenAI = exports.wrap = exports.wrapAISDK = exports.runAndEvaluate = exports.UnprocessableEntityError = exports.PermissionDeniedError = exports.InternalServerError = exports.AuthenticationError = exports.BadRequestError = exports.RateLimitError = exports.ConflictError = exports.NotFoundError = exports.APIUserAbortError = exports.APIConnectionTimeoutError = exports.APIConnectionError = exports.APIError = exports.ScorecardError = exports.PagePromise = exports.Scorecard = exports.APIPromise = exports.toFile = exports.default = void 0;
|
|
8
8
|
var client_1 = require("./client.js");
|
|
9
9
|
Object.defineProperty(exports, "default", { enumerable: true, get: function () { return client_1.Scorecard; } });
|
|
10
10
|
var uploads_1 = require("./core/uploads.js");
|
|
@@ -33,4 +33,8 @@ var runAndEvaluate_1 = require("./lib/runAndEvaluate.js");
|
|
|
33
33
|
Object.defineProperty(exports, "runAndEvaluate", { enumerable: true, get: function () { return runAndEvaluate_1.runAndEvaluate; } });
|
|
34
34
|
var wrapAISDK_1 = require("./lib/wrapAISDK.js");
|
|
35
35
|
Object.defineProperty(exports, "wrapAISDK", { enumerable: true, get: function () { return wrapAISDK_1.wrapAISDK; } });
|
|
36
|
+
var wrapLLMs_1 = require("./lib/wrapLLMs.js");
|
|
37
|
+
Object.defineProperty(exports, "wrap", { enumerable: true, get: function () { return wrapLLMs_1.wrap; } });
|
|
38
|
+
Object.defineProperty(exports, "wrapOpenAI", { enumerable: true, get: function () { return wrapLLMs_1.wrapOpenAI; } });
|
|
39
|
+
Object.defineProperty(exports, "wrapAnthropic", { enumerable: true, get: function () { return wrapLLMs_1.wrapAnthropic; } });
|
|
36
40
|
//# sourceMappingURL=index.js.map
|
package/index.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.js","sourceRoot":"","sources":["src/index.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,sCAAgD;AAAvC,iGAAA,SAAS,OAAW;AAE7B,6CAAyD;AAA/B,iGAAA,MAAM,OAAA;AAChC,qDAAgD;AAAvC,yGAAA,UAAU,OAAA;AACnB,sCAAyD;AAAhD,mGAAA,SAAS,OAAA;AAClB,mDAAgD;AAAvC,yGAAA,WAAW,OAAA;AACpB,yCAcsB;AAbpB,uGAAA,cAAc,OAAA;AACd,iGAAA,QAAQ,OAAA;AACR,2GAAA,kBAAkB,OAAA;AAClB,kHAAA,yBAAyB,OAAA;AACzB,0GAAA,iBAAiB,OAAA;AACjB,sGAAA,aAAa,OAAA;AACb,sGAAA,aAAa,OAAA;AACb,uGAAA,cAAc,OAAA;AACd,wGAAA,eAAe,OAAA;AACf,4GAAA,mBAAmB,OAAA;AACnB,4GAAA,mBAAmB,OAAA;AACnB,8GAAA,qBAAqB,OAAA;AACrB,iHAAA,wBAAwB,OAAA;AAG1B,0DAAsD;AAA7C,gHAAA,cAAc,OAAA;AACvB,gDAA4C;AAAnC,sGAAA,SAAS,OAAA"}
|
|
1
|
+
{"version":3,"file":"index.js","sourceRoot":"","sources":["src/index.ts"],"names":[],"mappings":";AAAA,sFAAsF;;;AAEtF,sCAAgD;AAAvC,iGAAA,SAAS,OAAW;AAE7B,6CAAyD;AAA/B,iGAAA,MAAM,OAAA;AAChC,qDAAgD;AAAvC,yGAAA,UAAU,OAAA;AACnB,sCAAyD;AAAhD,mGAAA,SAAS,OAAA;AAClB,mDAAgD;AAAvC,yGAAA,WAAW,OAAA;AACpB,yCAcsB;AAbpB,uGAAA,cAAc,OAAA;AACd,iGAAA,QAAQ,OAAA;AACR,2GAAA,kBAAkB,OAAA;AAClB,kHAAA,yBAAyB,OAAA;AACzB,0GAAA,iBAAiB,OAAA;AACjB,sGAAA,aAAa,OAAA;AACb,sGAAA,aAAa,OAAA;AACb,uGAAA,cAAc,OAAA;AACd,wGAAA,eAAe,OAAA;AACf,4GAAA,mBAAmB,OAAA;AACnB,4GAAA,mBAAmB,OAAA;AACnB,8GAAA,qBAAqB,OAAA;AACrB,iHAAA,wBAAwB,OAAA;AAG1B,0DAAsD;AAA7C,gHAAA,cAAc,OAAA;AACvB,gDAA4C;AAAnC,sGAAA,SAAS,OAAA;AAClB,8CAAiE;AAAxD,gGAAA,IAAI,OAAA;AAAE,sGAAA,UAAU,OAAA;AAAE,yGAAA,aAAa,OAAA"}
|
package/index.mjs
CHANGED
|
@@ -7,4 +7,5 @@ export { PagePromise } from "./core/pagination.mjs";
|
|
|
7
7
|
export { ScorecardError, APIError, APIConnectionError, APIConnectionTimeoutError, APIUserAbortError, NotFoundError, ConflictError, RateLimitError, BadRequestError, AuthenticationError, InternalServerError, PermissionDeniedError, UnprocessableEntityError, } from "./core/error.mjs";
|
|
8
8
|
export { runAndEvaluate } from "./lib/runAndEvaluate.mjs";
|
|
9
9
|
export { wrapAISDK } from "./lib/wrapAISDK.mjs";
|
|
10
|
+
export { wrap, wrapOpenAI, wrapAnthropic } from "./lib/wrapLLMs.mjs";
|
|
10
11
|
//# sourceMappingURL=index.mjs.map
|
package/index.mjs.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"index.mjs","sourceRoot":"","sources":["src/index.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,SAAS,IAAI,OAAO,EAAE;OAExB,EAAmB,MAAM,EAAE;OAC3B,EAAE,UAAU,EAAE;OACd,EAAE,SAAS,EAAsB;OACjC,EAAE,WAAW,EAAE;OACf,EACL,cAAc,EACd,QAAQ,EACR,kBAAkB,EAClB,yBAAyB,EACzB,iBAAiB,EACjB,aAAa,EACb,aAAa,EACb,cAAc,EACd,eAAe,EACf,mBAAmB,EACnB,mBAAmB,EACnB,qBAAqB,EACrB,wBAAwB,GACzB;OAEM,EAAE,cAAc,EAAE;OAClB,EAAE,SAAS,EAAE"}
|
|
1
|
+
{"version":3,"file":"index.mjs","sourceRoot":"","sources":["src/index.ts"],"names":[],"mappings":"AAAA,sFAAsF;OAE/E,EAAE,SAAS,IAAI,OAAO,EAAE;OAExB,EAAmB,MAAM,EAAE;OAC3B,EAAE,UAAU,EAAE;OACd,EAAE,SAAS,EAAsB;OACjC,EAAE,WAAW,EAAE;OACf,EACL,cAAc,EACd,QAAQ,EACR,kBAAkB,EAClB,yBAAyB,EACzB,iBAAiB,EACjB,aAAa,EACb,aAAa,EACb,cAAc,EACd,eAAe,EACf,mBAAmB,EACnB,mBAAmB,EACnB,qBAAqB,EACrB,wBAAwB,GACzB;OAEM,EAAE,cAAc,EAAE;OAClB,EAAE,SAAS,EAAE;OACb,EAAE,IAAI,EAAE,UAAU,EAAE,aAAa,EAAE"}
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Configuration for wrapping LLM SDKs
|
|
3
|
+
*/
|
|
4
|
+
interface WrapConfig {
|
|
5
|
+
/**
|
|
6
|
+
* ID of the Scorecard project that traces should be associated with.
|
|
7
|
+
* Defaults to SCORECARD_PROJECT_ID environment variable.
|
|
8
|
+
*/
|
|
9
|
+
projectId?: string | undefined;
|
|
10
|
+
/**
|
|
11
|
+
* Scorecard API key for authentication.
|
|
12
|
+
* Defaults to SCORECARD_API_KEY environment variable.
|
|
13
|
+
*/
|
|
14
|
+
apiKey?: string | undefined;
|
|
15
|
+
/**
|
|
16
|
+
* Service name for telemetry.
|
|
17
|
+
* Defaults to "llm-app".
|
|
18
|
+
*/
|
|
19
|
+
serviceName?: string | undefined;
|
|
20
|
+
/**
|
|
21
|
+
* OTLP endpoint for trace export.
|
|
22
|
+
* Defaults to "https://tracing.scorecard.io/otel/v1/traces".
|
|
23
|
+
*/
|
|
24
|
+
endpoint?: string | undefined;
|
|
25
|
+
/**
|
|
26
|
+
* Maximum batch size of spans to be exported in a single request.
|
|
27
|
+
* Lower values provide faster feedback but more network requests.
|
|
28
|
+
* Higher values are more efficient but delay span visibility.
|
|
29
|
+
* @default 1
|
|
30
|
+
*/
|
|
31
|
+
maxExportBatchSize?: number | undefined;
|
|
32
|
+
}
|
|
33
|
+
/**
|
|
34
|
+
* Wrap any LLM SDK (OpenAI or Anthropic) to automatically trace all API calls
|
|
35
|
+
*
|
|
36
|
+
* @example
|
|
37
|
+
* ```typescript
|
|
38
|
+
* import { wrap } from '@scorecard/node';
|
|
39
|
+
* import OpenAI from 'openai';
|
|
40
|
+
* import Anthropic from '@anthropic-ai/sdk';
|
|
41
|
+
*
|
|
42
|
+
* // Works with OpenAI
|
|
43
|
+
* const openai = wrap(new OpenAI({ apiKey: '...' }), {
|
|
44
|
+
* apiKey: process.env.SCORECARD_API_KEY,
|
|
45
|
+
* projectId: '123'
|
|
46
|
+
* });
|
|
47
|
+
*
|
|
48
|
+
* // Works with Anthropic
|
|
49
|
+
* const claude = wrap(new Anthropic({ apiKey: '...' }), {
|
|
50
|
+
* apiKey: process.env.SCORECARD_API_KEY,
|
|
51
|
+
* projectId: '123'
|
|
52
|
+
* });
|
|
53
|
+
*
|
|
54
|
+
* // Use normally - traces are automatically sent to Scorecard
|
|
55
|
+
* const response = await openai.chat.completions.create({...});
|
|
56
|
+
* const response2 = await claude.messages.create({...});
|
|
57
|
+
* ```
|
|
58
|
+
*/
|
|
59
|
+
export declare function wrap<T>(client: T, config?: WrapConfig): T;
|
|
60
|
+
export declare const wrapOpenAI: typeof wrap;
|
|
61
|
+
export declare const wrapAnthropic: typeof wrap;
|
|
62
|
+
export {};
|
|
63
|
+
//# sourceMappingURL=wrapLLMs.d.mts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"wrapLLMs.d.mts","sourceRoot":"","sources":["../src/lib/wrapLLMs.ts"],"names":[],"mappings":"AAcA;;GAEG;AACH,UAAU,UAAU;IAClB;;;OAGG;IACH,SAAS,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAE/B;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAE5B;;;OAGG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAEjC;;;OAGG;IACH,QAAQ,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAE9B;;;;;OAKG;IACH,kBAAkB,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;CACzC;AAmTD;;;;;;;;;;;;;;;;;;;;;;;;;GAyBG;AACH,wBAAgB,IAAI,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,GAAE,UAAe,GAAG,CAAC,CAkG7D;AAGD,eAAO,MAAM,UAAU,aAAO,CAAC;AAC/B,eAAO,MAAM,aAAa,aAAO,CAAC"}
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Configuration for wrapping LLM SDKs
|
|
3
|
+
*/
|
|
4
|
+
interface WrapConfig {
|
|
5
|
+
/**
|
|
6
|
+
* ID of the Scorecard project that traces should be associated with.
|
|
7
|
+
* Defaults to SCORECARD_PROJECT_ID environment variable.
|
|
8
|
+
*/
|
|
9
|
+
projectId?: string | undefined;
|
|
10
|
+
/**
|
|
11
|
+
* Scorecard API key for authentication.
|
|
12
|
+
* Defaults to SCORECARD_API_KEY environment variable.
|
|
13
|
+
*/
|
|
14
|
+
apiKey?: string | undefined;
|
|
15
|
+
/**
|
|
16
|
+
* Service name for telemetry.
|
|
17
|
+
* Defaults to "llm-app".
|
|
18
|
+
*/
|
|
19
|
+
serviceName?: string | undefined;
|
|
20
|
+
/**
|
|
21
|
+
* OTLP endpoint for trace export.
|
|
22
|
+
* Defaults to "https://tracing.scorecard.io/otel/v1/traces".
|
|
23
|
+
*/
|
|
24
|
+
endpoint?: string | undefined;
|
|
25
|
+
/**
|
|
26
|
+
* Maximum batch size of spans to be exported in a single request.
|
|
27
|
+
* Lower values provide faster feedback but more network requests.
|
|
28
|
+
* Higher values are more efficient but delay span visibility.
|
|
29
|
+
* @default 1
|
|
30
|
+
*/
|
|
31
|
+
maxExportBatchSize?: number | undefined;
|
|
32
|
+
}
|
|
33
|
+
/**
|
|
34
|
+
* Wrap any LLM SDK (OpenAI or Anthropic) to automatically trace all API calls
|
|
35
|
+
*
|
|
36
|
+
* @example
|
|
37
|
+
* ```typescript
|
|
38
|
+
* import { wrap } from '@scorecard/node';
|
|
39
|
+
* import OpenAI from 'openai';
|
|
40
|
+
* import Anthropic from '@anthropic-ai/sdk';
|
|
41
|
+
*
|
|
42
|
+
* // Works with OpenAI
|
|
43
|
+
* const openai = wrap(new OpenAI({ apiKey: '...' }), {
|
|
44
|
+
* apiKey: process.env.SCORECARD_API_KEY,
|
|
45
|
+
* projectId: '123'
|
|
46
|
+
* });
|
|
47
|
+
*
|
|
48
|
+
* // Works with Anthropic
|
|
49
|
+
* const claude = wrap(new Anthropic({ apiKey: '...' }), {
|
|
50
|
+
* apiKey: process.env.SCORECARD_API_KEY,
|
|
51
|
+
* projectId: '123'
|
|
52
|
+
* });
|
|
53
|
+
*
|
|
54
|
+
* // Use normally - traces are automatically sent to Scorecard
|
|
55
|
+
* const response = await openai.chat.completions.create({...});
|
|
56
|
+
* const response2 = await claude.messages.create({...});
|
|
57
|
+
* ```
|
|
58
|
+
*/
|
|
59
|
+
export declare function wrap<T>(client: T, config?: WrapConfig): T;
|
|
60
|
+
export declare const wrapOpenAI: typeof wrap;
|
|
61
|
+
export declare const wrapAnthropic: typeof wrap;
|
|
62
|
+
export {};
|
|
63
|
+
//# sourceMappingURL=wrapLLMs.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"wrapLLMs.d.ts","sourceRoot":"","sources":["../src/lib/wrapLLMs.ts"],"names":[],"mappings":"AAcA;;GAEG;AACH,UAAU,UAAU;IAClB;;;OAGG;IACH,SAAS,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAE/B;;;OAGG;IACH,MAAM,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAE5B;;;OAGG;IACH,WAAW,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAEjC;;;OAGG;IACH,QAAQ,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;IAE9B;;;;;OAKG;IACH,kBAAkB,CAAC,EAAE,MAAM,GAAG,SAAS,CAAC;CACzC;AAmTD;;;;;;;;;;;;;;;;;;;;;;;;;GAyBG;AACH,wBAAgB,IAAI,CAAC,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,GAAE,UAAe,GAAG,CAAC,CAkG7D;AAGD,eAAO,MAAM,UAAU,aAAO,CAAC;AAC/B,eAAO,MAAM,aAAa,aAAO,CAAC"}
|
package/lib/wrapLLMs.js
ADDED
|
@@ -0,0 +1,386 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.wrapAnthropic = exports.wrapOpenAI = void 0;
|
|
4
|
+
exports.wrap = wrap;
|
|
5
|
+
const api_1 = require("@opentelemetry/api");
|
|
6
|
+
const sdk_trace_node_1 = require("@opentelemetry/sdk-trace-node");
|
|
7
|
+
const exporter_trace_otlp_http_1 = require("@opentelemetry/exporter-trace-otlp-http");
|
|
8
|
+
const resources_1 = require("@opentelemetry/resources");
|
|
9
|
+
const semantic_conventions_1 = require("@opentelemetry/semantic-conventions");
|
|
10
|
+
const utils_1 = require("../internal/utils.js");
|
|
11
|
+
const error_1 = require("../error.js");
|
|
12
|
+
/**
|
|
13
|
+
* Custom exporter that wraps OTLP exporter and injects projectId from span attributes
|
|
14
|
+
* into the resource before export. This allows per-span projectId while keeping
|
|
15
|
+
* ResourceAttributes where the backend expects them.
|
|
16
|
+
*/
|
|
17
|
+
class ScorecardExporter extends exporter_trace_otlp_http_1.OTLPTraceExporter {
|
|
18
|
+
export(spans, resultCallback) {
|
|
19
|
+
// For each span, inject all scorecard.* attributes into the resource
|
|
20
|
+
spans.forEach((span) => {
|
|
21
|
+
// Collect all scorecard.* attributes from span attributes
|
|
22
|
+
const scorecardAttrs = Object.entries(span.attributes).reduce((acc, [key, value]) => {
|
|
23
|
+
if (key.startsWith('scorecard.')) {
|
|
24
|
+
acc[key] = value;
|
|
25
|
+
}
|
|
26
|
+
return acc;
|
|
27
|
+
}, {});
|
|
28
|
+
if (Object.keys(scorecardAttrs).length > 0) {
|
|
29
|
+
// Merge all scorecard.* attributes into the resource
|
|
30
|
+
const newResource = span.resource.merge((0, resources_1.resourceFromAttributes)(scorecardAttrs));
|
|
31
|
+
// Directly assign the new resource (cast to any to bypass readonly)
|
|
32
|
+
span.resource = newResource;
|
|
33
|
+
}
|
|
34
|
+
});
|
|
35
|
+
// Call the parent exporter with modified spans
|
|
36
|
+
super.export(spans, resultCallback);
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
/**
|
|
40
|
+
* Composite processor that forwards span events to all registered processors.
|
|
41
|
+
* Allows dynamic addition of exporters after provider registration.
|
|
42
|
+
*/
|
|
43
|
+
class CompositeProcessor {
|
|
44
|
+
constructor() {
|
|
45
|
+
this.processors = new Map();
|
|
46
|
+
}
|
|
47
|
+
addProcessor(apiKey, endpoint, maxExportBatchSize) {
|
|
48
|
+
const key = `${apiKey}:${endpoint}`;
|
|
49
|
+
if (this.processors.has(key))
|
|
50
|
+
return;
|
|
51
|
+
const exporter = new ScorecardExporter({
|
|
52
|
+
url: endpoint,
|
|
53
|
+
headers: { Authorization: `Bearer ${apiKey}` },
|
|
54
|
+
});
|
|
55
|
+
const processor = new sdk_trace_node_1.BatchSpanProcessor(exporter, {
|
|
56
|
+
maxExportBatchSize,
|
|
57
|
+
});
|
|
58
|
+
this.processors.set(key, processor);
|
|
59
|
+
}
|
|
60
|
+
onStart(span, parentContext) {
|
|
61
|
+
for (const processor of this.processors.values()) {
|
|
62
|
+
processor.onStart(span, parentContext);
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
onEnd(span) {
|
|
66
|
+
for (const processor of this.processors.values()) {
|
|
67
|
+
processor.onEnd(span);
|
|
68
|
+
}
|
|
69
|
+
}
|
|
70
|
+
async forceFlush() {
|
|
71
|
+
await Promise.all(Array.from(this.processors.values()).map((p) => p.forceFlush()));
|
|
72
|
+
}
|
|
73
|
+
async shutdown() {
|
|
74
|
+
await Promise.all(Array.from(this.processors.values()).map((p) => p.shutdown()));
|
|
75
|
+
}
|
|
76
|
+
}
|
|
77
|
+
let globalProvider = null;
|
|
78
|
+
let globalTracer = null;
|
|
79
|
+
let compositeProcessor = null;
|
|
80
|
+
/**
|
|
81
|
+
* Initialize OpenTelemetry provider for LLM SDK wrappers.
|
|
82
|
+
* Creates a single global provider for nesting support, with exporters
|
|
83
|
+
* added dynamically for each unique apiKey+endpoint combination.
|
|
84
|
+
*/
|
|
85
|
+
function initProvider(config) {
|
|
86
|
+
const apiKey = config.apiKey || (0, utils_1.readEnv)('SCORECARD_API_KEY');
|
|
87
|
+
if (!apiKey) {
|
|
88
|
+
throw new error_1.ScorecardError('Scorecard API key is required. Set SCORECARD_API_KEY environment variable or pass apiKey in config.');
|
|
89
|
+
}
|
|
90
|
+
const endpoint = config.endpoint || 'https://tracing.scorecard.io/otel/v1/traces';
|
|
91
|
+
const serviceName = config.serviceName || 'llm-app';
|
|
92
|
+
const projectId = config.projectId || (0, utils_1.readEnv)('SCORECARD_PROJECT_ID');
|
|
93
|
+
const maxExportBatchSize = config.maxExportBatchSize ?? 1;
|
|
94
|
+
// Create the global provider once (enables span nesting)
|
|
95
|
+
if (!globalProvider) {
|
|
96
|
+
compositeProcessor = new CompositeProcessor();
|
|
97
|
+
const resource = (0, resources_1.resourceFromAttributes)({
|
|
98
|
+
[semantic_conventions_1.ATTR_SERVICE_NAME]: serviceName,
|
|
99
|
+
});
|
|
100
|
+
globalProvider = new sdk_trace_node_1.NodeTracerProvider({
|
|
101
|
+
resource,
|
|
102
|
+
spanProcessors: [compositeProcessor],
|
|
103
|
+
});
|
|
104
|
+
globalProvider.register();
|
|
105
|
+
globalTracer = api_1.trace.getTracer('scorecard-llm');
|
|
106
|
+
}
|
|
107
|
+
// Add an exporter for this specific apiKey+endpoint (if not already added)
|
|
108
|
+
compositeProcessor?.addProcessor(apiKey, endpoint, maxExportBatchSize);
|
|
109
|
+
return projectId;
|
|
110
|
+
}
|
|
111
|
+
/**
|
|
112
|
+
* Detect which LLM provider the client is for
|
|
113
|
+
*/
|
|
114
|
+
function detectProvider(client) {
|
|
115
|
+
// Check for OpenAI SDK structure
|
|
116
|
+
if (client.chat?.completions) {
|
|
117
|
+
return 'openai';
|
|
118
|
+
}
|
|
119
|
+
// Check for Anthropic SDK structure
|
|
120
|
+
if (client.messages) {
|
|
121
|
+
return 'anthropic';
|
|
122
|
+
}
|
|
123
|
+
throw new error_1.ScorecardError('Unable to detect LLM provider. Client must be an OpenAI or Anthropic SDK instance.');
|
|
124
|
+
}
|
|
125
|
+
/**
|
|
126
|
+
* Handle OpenAI-specific response parsing
|
|
127
|
+
*/
|
|
128
|
+
function handleOpenAIResponse(span, result, params) {
|
|
129
|
+
span.setAttributes({
|
|
130
|
+
'gen_ai.response.id': result.id || 'unknown',
|
|
131
|
+
'gen_ai.response.model': result.model || params.model || 'unknown',
|
|
132
|
+
'gen_ai.response.finish_reason': result.choices?.[0]?.finish_reason || 'unknown',
|
|
133
|
+
'gen_ai.usage.prompt_tokens': result.usage?.prompt_tokens || 0,
|
|
134
|
+
'gen_ai.usage.completion_tokens': result.usage?.completion_tokens || 0,
|
|
135
|
+
'gen_ai.usage.total_tokens': result.usage?.total_tokens || 0,
|
|
136
|
+
});
|
|
137
|
+
if (result.choices?.[0]?.message) {
|
|
138
|
+
span.setAttribute('gen_ai.completion.choices', JSON.stringify([result.choices[0].message]));
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
/**
|
|
142
|
+
* Handle Anthropic-specific response parsing
|
|
143
|
+
*/
|
|
144
|
+
function handleAnthropicResponse(span, result, params) {
|
|
145
|
+
span.setAttributes({
|
|
146
|
+
'gen_ai.response.id': result.id || 'unknown',
|
|
147
|
+
'gen_ai.response.model': result.model || params.model || 'unknown',
|
|
148
|
+
'gen_ai.response.finish_reason': result.stop_reason || 'unknown',
|
|
149
|
+
'gen_ai.usage.prompt_tokens': result.usage?.input_tokens || 0,
|
|
150
|
+
'gen_ai.usage.completion_tokens': result.usage?.output_tokens || 0,
|
|
151
|
+
'gen_ai.usage.total_tokens': (result.usage?.input_tokens || 0) + (result.usage?.output_tokens || 0),
|
|
152
|
+
});
|
|
153
|
+
// Collect text from all text blocks (Anthropic can return multiple content blocks)
|
|
154
|
+
if (result.content) {
|
|
155
|
+
const completionTexts = result.content.filter((c) => c.text).map((c) => c.text);
|
|
156
|
+
if (completionTexts.length > 0) {
|
|
157
|
+
span.setAttribute('gen_ai.completion.choices', JSON.stringify([{ message: { role: 'assistant', content: completionTexts.join('\n') } }]));
|
|
158
|
+
}
|
|
159
|
+
}
|
|
160
|
+
}
|
|
161
|
+
/**
|
|
162
|
+
* Wrapper for async streams that collects metadata and ends span when consumed
|
|
163
|
+
*/
|
|
164
|
+
class StreamWrapper {
|
|
165
|
+
constructor(stream, span, provider, params) {
|
|
166
|
+
this.stream = stream;
|
|
167
|
+
this.span = span;
|
|
168
|
+
this.provider = provider;
|
|
169
|
+
this.params = params;
|
|
170
|
+
this.contentParts = [];
|
|
171
|
+
this.finishReason = null;
|
|
172
|
+
this.usageData = {};
|
|
173
|
+
this.responseId = null;
|
|
174
|
+
this.model = null;
|
|
175
|
+
}
|
|
176
|
+
async *[Symbol.asyncIterator]() {
|
|
177
|
+
try {
|
|
178
|
+
for await (const chunk of this.stream) {
|
|
179
|
+
this.processChunk(chunk);
|
|
180
|
+
yield chunk;
|
|
181
|
+
}
|
|
182
|
+
}
|
|
183
|
+
finally {
|
|
184
|
+
this.finalizeSpan();
|
|
185
|
+
}
|
|
186
|
+
}
|
|
187
|
+
processChunk(chunk) {
|
|
188
|
+
// OpenAI streaming format
|
|
189
|
+
if (this.provider === 'openai') {
|
|
190
|
+
if (!this.responseId && chunk.id) {
|
|
191
|
+
this.responseId = chunk.id;
|
|
192
|
+
}
|
|
193
|
+
if (!this.model && chunk.model) {
|
|
194
|
+
this.model = chunk.model;
|
|
195
|
+
}
|
|
196
|
+
if (chunk.choices?.[0]) {
|
|
197
|
+
const choice = chunk.choices[0];
|
|
198
|
+
if (choice.delta?.content) {
|
|
199
|
+
this.contentParts.push(choice.delta.content);
|
|
200
|
+
}
|
|
201
|
+
if (choice.finish_reason) {
|
|
202
|
+
this.finishReason = choice.finish_reason;
|
|
203
|
+
}
|
|
204
|
+
}
|
|
205
|
+
// OpenAI includes usage in the last chunk with stream_options
|
|
206
|
+
if (chunk.usage) {
|
|
207
|
+
this.usageData = {
|
|
208
|
+
prompt_tokens: chunk.usage.prompt_tokens || 0,
|
|
209
|
+
completion_tokens: chunk.usage.completion_tokens || 0,
|
|
210
|
+
total_tokens: chunk.usage.total_tokens || 0,
|
|
211
|
+
};
|
|
212
|
+
}
|
|
213
|
+
}
|
|
214
|
+
// Anthropic streaming format
|
|
215
|
+
else if (this.provider === 'anthropic') {
|
|
216
|
+
if (chunk.type === 'message_start' && chunk.message) {
|
|
217
|
+
this.responseId = chunk.message.id;
|
|
218
|
+
this.model = chunk.message.model;
|
|
219
|
+
if (chunk.message.usage) {
|
|
220
|
+
this.usageData['input_tokens'] = chunk.message.usage.input_tokens || 0;
|
|
221
|
+
}
|
|
222
|
+
}
|
|
223
|
+
else if (chunk.type === 'content_block_delta' && chunk.delta?.text) {
|
|
224
|
+
this.contentParts.push(chunk.delta.text);
|
|
225
|
+
}
|
|
226
|
+
else if (chunk.type === 'message_delta') {
|
|
227
|
+
if (chunk.delta?.stop_reason) {
|
|
228
|
+
this.finishReason = chunk.delta.stop_reason;
|
|
229
|
+
}
|
|
230
|
+
if (chunk.usage?.output_tokens) {
|
|
231
|
+
this.usageData['output_tokens'] = chunk.usage.output_tokens;
|
|
232
|
+
}
|
|
233
|
+
}
|
|
234
|
+
}
|
|
235
|
+
}
|
|
236
|
+
finalizeSpan() {
|
|
237
|
+
// Set response attributes
|
|
238
|
+
this.span.setAttributes({
|
|
239
|
+
'gen_ai.response.id': this.responseId || 'unknown',
|
|
240
|
+
'gen_ai.response.model': this.model || this.params.model || 'unknown',
|
|
241
|
+
'gen_ai.response.finish_reason': this.finishReason || 'unknown',
|
|
242
|
+
});
|
|
243
|
+
// Set usage data
|
|
244
|
+
if (Object.keys(this.usageData).length > 0) {
|
|
245
|
+
if (this.provider === 'openai') {
|
|
246
|
+
this.span.setAttributes({
|
|
247
|
+
'gen_ai.usage.prompt_tokens': this.usageData['prompt_tokens'] || 0,
|
|
248
|
+
'gen_ai.usage.completion_tokens': this.usageData['completion_tokens'] || 0,
|
|
249
|
+
'gen_ai.usage.total_tokens': this.usageData['total_tokens'] || 0,
|
|
250
|
+
});
|
|
251
|
+
}
|
|
252
|
+
else if (this.provider === 'anthropic') {
|
|
253
|
+
const inputTokens = this.usageData['input_tokens'] || 0;
|
|
254
|
+
const outputTokens = this.usageData['output_tokens'] || 0;
|
|
255
|
+
this.span.setAttributes({
|
|
256
|
+
'gen_ai.usage.prompt_tokens': inputTokens,
|
|
257
|
+
'gen_ai.usage.completion_tokens': outputTokens,
|
|
258
|
+
'gen_ai.usage.total_tokens': inputTokens + outputTokens,
|
|
259
|
+
});
|
|
260
|
+
}
|
|
261
|
+
}
|
|
262
|
+
// Set completion content if any was collected
|
|
263
|
+
if (this.contentParts.length > 0) {
|
|
264
|
+
const content = this.contentParts.join('');
|
|
265
|
+
this.span.setAttribute('gen_ai.completion.choices', JSON.stringify([{ message: { role: 'assistant', content } }]));
|
|
266
|
+
}
|
|
267
|
+
this.span.end();
|
|
268
|
+
}
|
|
269
|
+
}
|
|
/**
 * Wrap any LLM SDK (OpenAI or Anthropic) to automatically trace all API calls
 *
 * @example
 * ```typescript
 * import { wrap } from '@scorecard/node';
 * import OpenAI from 'openai';
 * import Anthropic from '@anthropic-ai/sdk';
 *
 * // Works with OpenAI
 * const openai = wrap(new OpenAI({ apiKey: '...' }), {
 *   apiKey: process.env.SCORECARD_API_KEY,
 *   projectId: '123'
 * });
 *
 * // Works with Anthropic
 * const claude = wrap(new Anthropic({ apiKey: '...' }), {
 *   apiKey: process.env.SCORECARD_API_KEY,
 *   projectId: '123'
 * });
 *
 * // Use normally - traces are automatically sent to Scorecard
 * const response = await openai.chat.completions.create({...});
 * const response2 = await claude.messages.create({...});
 * ```
 *
 * @param {object} client - An instantiated OpenAI or Anthropic SDK client.
 * @param {object} [config] - Scorecard telemetry config (e.g. apiKey, projectId).
 * @returns {object} A Proxy over `client`; only the known LLM entry points are traced.
 * @throws {ScorecardError} If the global tracer failed to initialize.
 */
function wrap(client, config = {}) {
    const projectId = initProvider(config);
    if (!globalTracer) {
        throw new error_1.ScorecardError('Failed to initialize tracer');
    }
    const tracer = globalTracer;
    const provider = detectProvider(client);
    // Track the path to determine if we should wrap this method
    const createHandler = (target, path = []) => ({
        get(target, prop) {
            const value = target[prop];
            // Check if this is a method we should wrap based on the path
            const currentPath = [...path, prop.toString()];
            const methodPath = currentPath.join('.');
            const shouldWrap = (provider === 'openai' && methodPath === 'chat.completions.create') ||
                (provider === 'anthropic' &&
                    (methodPath === 'messages.create' || methodPath === 'messages.stream'));
            // Intercept specific LLM methods
            if (shouldWrap && typeof value === 'function') {
                return async function (...args) {
                    const params = args[0] || {};
                    // Streaming if: 1) stream param is true, or 2) using the 'stream' method
                    const isStreaming = params.stream === true || prop === 'stream';
                    // Start span in the current active context (enables nesting)
                    const span = tracer.startSpan(`${provider}.request`, {}, api_1.context.active());
                    // Set request attributes (common to both providers)
                    span.setAttributes({
                        'gen_ai.system': provider,
                        'gen_ai.request.model': params.model || 'unknown',
                        'gen_ai.operation.name': 'chat',
                        ...(params.temperature !== undefined && { 'gen_ai.request.temperature': params.temperature }),
                        ...(params.max_tokens !== undefined && { 'gen_ai.request.max_tokens': params.max_tokens }),
                        ...(params.top_p !== undefined && { 'gen_ai.request.top_p': params.top_p }),
                    });
                    // Store projectId as span attribute - our custom exporter will inject it
                    // into ResourceAttributes before export (where the backend expects it)
                    if (projectId) {
                        span.setAttribute('scorecard.project_id', projectId);
                    }
                    // Set prompt messages
                    if (params.messages) {
                        span.setAttribute('gen_ai.prompt.messages', JSON.stringify(params.messages));
                    }
                    // Execute within the span's context (enables nested spans to be children)
                    return api_1.context.with(api_1.trace.setSpan(api_1.context.active(), span), async () => {
                        try {
                            const result = await value.apply(target, args);
                            if (isStreaming) {
                                // For streaming, hand span ownership to the wrapper, which
                                // ends the span once the stream is fully consumed.
                                return new StreamWrapper(result, span, provider, params);
                            }
                            // For non-streaming, set response attributes immediately
                            if (provider === 'openai') {
                                handleOpenAIResponse(span, result, params);
                            }
                            else if (provider === 'anthropic') {
                                handleAnthropicResponse(span, result, params);
                            }
                            span.end();
                            return result;
                        }
                        catch (error) {
                            // recordException alone does not mark the span as failed;
                            // set the status explicitly per OTel error conventions.
                            span.recordException(error);
                            span.setStatus({
                                code: api_1.SpanStatusCode.ERROR,
                                message: error instanceof Error ? error.message : String(error),
                            });
                            // BUGFIX: always end the span here. On failure no
                            // StreamWrapper exists to end a streaming span, so the old
                            // "end only if !isStreaming" logic leaked spans for
                            // streaming calls that threw.
                            span.end();
                            throw error;
                        }
                    });
                };
            }
            // Recursively proxy nested objects, passing the path along
            if (value && typeof value === 'object') {
                return new Proxy(value, createHandler(value, currentPath));
            }
            // Return functions and primitives as-is
            if (typeof value === 'function') {
                return value.bind(target);
            }
            return value;
        },
    });
    return new Proxy(client, createHandler(client, []));
}
// Backwards compatibility aliases: both deprecated names resolve to the
// provider-agnostic wrap() above.
exports.wrapAnthropic = wrap;
exports.wrapOpenAI = wrap;
386
|
+
//# sourceMappingURL=wrapLLMs.js.map
|