@blaxel/llamaindex 0.2.36-dev.185 → 0.2.36-dev.190

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,3 @@
+ import "./telemetry.js";
+ export * from "./model.js";
+ export * from "./tools.js";
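
The new entry point pulls in telemetry for its side effect and re-exports the model and tool helpers, so the package root exposes the full public surface. A minimal sketch of what a consumer imports (illustrative, not part of the diff):

    // Telemetry is initialized as a side effect of loading index.js
    import { blModel, blTool, blTools } from "@blaxel/llamaindex";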
@@ -0,0 +1,135 @@
+ import { authenticate, getModelMetadata, handleDynamicImportError, settings } from "@blaxel/core";
+ import { anthropic, AnthropicSession } from "@llamaindex/anthropic";
+ import { Gemini } from "@llamaindex/google";
+ import { openai } from "@llamaindex/openai";
+ // Custom LLM provider that refreshes auth on each call
+ class BlaxelLLM {
+ model;
+ options;
+ modelData;
+ type;
+ _metadata;
+ constructor(model, modelData, options) {
+ this.model = model;
+ this.modelData = modelData;
+ this.options = options;
+ this.type = modelData?.spec?.runtime?.type || "openai";
+ }
+ get supportToolCall() {
+ return true;
+ }
+ get metadata() {
+ // Return cached metadata or default values
+ if (this._metadata) {
+ return this._metadata;
+ }
+ // Return default values with overrides from options
+ return {
+ model: this.model,
+ temperature: this.options?.temperature ?? 0,
+ topP: this.options?.topP ?? 1,
+ maxTokens: this.options?.maxTokens ?? undefined,
+ contextWindow: this.options?.contextWindow ?? 4096,
+ tokenizer: undefined, // Let the underlying LLM handle tokenizer
+ structuredOutput: this.options?.structuredOutput ?? false,
+ };
+ }
+ async ensureMetadata() {
+ if (!this._metadata) {
+ const llm = await this.createLLM();
+ this._metadata = llm.metadata;
+ }
+ }
+ async createLLM() {
+ await authenticate();
+ const url = `${settings.runUrl}/${settings.workspace}/models/${this.model}`;
+ if (this.type === "mistral") {
+ return openai({
+ model: this.modelData?.spec?.runtime?.model,
+ apiKey: settings.token,
+ baseURL: `${url}/v1`,
+ ...this.options,
+ });
+ }
+ if (this.type === "anthropic") {
+ const llm = anthropic({
+ model: this.modelData?.spec?.runtime?.model,
+ session: new AnthropicSession({
+ baseURL: url,
+ defaultHeaders: settings.headers,
+ }),
+ ...this.options,
+ });
+ return {
+ ...llm,
+ supportToolCall: true,
+ };
+ }
+ if (this.type === "cohere") {
+ const llm = openai({
+ model: this.modelData?.spec?.runtime?.model,
+ apiKey: settings.token,
+ baseURL: `${url}/compatibility/v1`,
+ ...this.options,
+ });
+ return {
+ ...llm,
+ supportToolCall: true,
+ };
+ }
+ if (this.type === "gemini") {
+ process.env.GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || "THIS_IS_A_DUMMY_KEY_FOR_LLAMAINDEX";
+ const llm = new Gemini({
+ apiKey: settings.token,
+ model: this.modelData?.spec?.runtime?.model,
+ httpOptions: {
+ baseUrl: url,
+ headers: settings.headers,
+ },
+ ...this.options,
+ });
+ return llm;
+ }
+ return openai({
+ model: this.modelData?.spec?.runtime?.model,
+ apiKey: settings.token,
+ baseURL: `${url}/v1`,
+ ...this.options,
+ });
+ }
+ async chat(params) {
+ await this.ensureMetadata();
+ const llm = await this.createLLM();
+ // Type guard to handle overloads
+ if ('stream' in params && params.stream === true) {
+ return llm.chat(params);
+ }
+ else {
+ return llm.chat(params);
+ }
+ }
+ async complete(params) {
+ await this.ensureMetadata();
+ const llm = await this.createLLM();
+ // Type guard to handle overloads
+ if ('stream' in params && params.stream === true) {
+ return llm.complete(params);
+ }
+ else {
+ return llm.complete(params);
+ }
+ }
+ }
+ export const blModel = async (model, options) => {
+ const modelData = await getModelMetadata(model);
+ if (!modelData) {
+ throw new Error(`Model ${model} not found`);
+ }
+ try {
+ return new BlaxelLLM(model, modelData, options);
+ }
+ catch (err) {
+ handleDynamicImportError(err);
+ throw err;
+ }
+ };
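
For context, a minimal usage sketch of the blModel factory defined above. The model name "sandbox-openai" is a placeholder for a model configured in your Blaxel workspace, and the response shape follows LlamaIndex's CompletionResponse:

    import { blModel } from "@blaxel/llamaindex";

    // Resolve a workspace model; blModel throws if the name is unknown
    const llm = await blModel("sandbox-openai", { temperature: 0 });

    // complete() re-authenticates and recreates the underlying LLM per call
    const response = await llm.complete({ prompt: "Say hello" });
    console.log(response.text);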
@@ -0,0 +1 @@
+ {"type":"module"}
@@ -0,0 +1,23 @@
+ import { logger } from "@blaxel/core";
+ import { registerInstrumentations } from "@opentelemetry/instrumentation";
+ // Safely initialize LlamaIndex telemetry instrumentation
+ async function initializeTelemetry() {
+ try {
+ const { LlamaIndexInstrumentation } = await import("@traceloop/instrumentation-llamaindex");
+ const llamaindex = new LlamaIndexInstrumentation();
+ // Try to enable the instrumentation
+ llamaindex.enable();
+ registerInstrumentations({
+ instrumentations: [llamaindex],
+ });
+ }
+ catch (error) {
+ // Log the error but don't crash the application
+ logger.warn("LlamaIndex telemetry instrumentation failed to initialize:", error instanceof Error ? error.message : String(error));
+ logger.warn("Continuing without LlamaIndex-specific telemetry...");
+ }
+ }
+ // Initialize telemetry asynchronously
+ initializeTelemetry().catch((error) => {
+ logger.warn("Failed to initialize telemetry:", error instanceof Error ? error.message : String(error));
+ });
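
Because index.js imports this file for its side effect, instrumentation is attempted as soon as the package is loaded; no explicit setup call is needed. A sketch, assuming an OpenTelemetry SDK is configured elsewhere in the application:

    // Loading the package is enough to register LlamaIndex instrumentation,
    // or to log a warning and continue if the instrumentation cannot load.
    import "@blaxel/llamaindex";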
@@ -0,0 +1,25 @@
+ // @ts-ignore - Required for build time due to missing types in 'llamaindex'
+ import { getTool, handleDynamicImportError } from "@blaxel/core";
+ import { tool } from "llamaindex";
+ export const blTool = async (name, options) => {
+ try {
+ const blaxelTool = await getTool(name, options);
+ const tools = blaxelTool.map((t) => {
+ // @ts-ignore - Required for build time due to missing types in 'llamaindex'
+ return tool(t.call.bind(t), {
+ name: t.name,
+ description: t.description,
+ parameters: t.inputSchema,
+ });
+ });
+ return tools;
+ }
+ catch (err) {
+ handleDynamicImportError(err);
+ throw err;
+ }
+ };
+ export const blTools = async (names, ms) => {
+ const toolArrays = await Promise.all(names.map((n) => blTool(n, ms)));
+ return toolArrays.flat();
+ };
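
A sketch of wiring these helpers into a LlamaIndex agent. The server name "my-mcp-server", the model name, and the agent() call from @llamaindex/workflow are illustrative assumptions, not part of this package:

    import { agent } from "@llamaindex/workflow";
    import { blModel, blTools } from "@blaxel/llamaindex";

    // blTools converts each named Blaxel tool server into LlamaIndex tools
    const myAgent = agent({
      llm: await blModel("sandbox-openai"),
      tools: await blTools(["my-mcp-server"]),
    });

    const result = await myAgent.run("Which tools do you have available?");
    console.log(result);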
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@blaxel/llamaindex",
- "version": "0.2.36-dev.185",
+ "version": "0.2.36-dev.190",
  "description": "Blaxel SDK for TypeScript",
  "license": "MIT",
  "author": "Blaxel, INC (https://blaxel.ai)",
@@ -17,38 +17,22 @@
  "agent",
  "mcp"
  ],
- "main": "dist/index.js",
- "module": "dist/index.js",
- "types": "dist/index.d.ts",
+ "main": "./dist/cjs/index.js",
+ "module": "./dist/esm/index.js",
+ "types": "./dist/cjs/types/index.d.ts",
  "exports": {
  ".": {
- "import": {
- "types": "./dist/index.d.ts",
- "default": "./dist/index.js"
- },
- "require": {
- "types": "./dist/index.d.ts",
- "default": "./dist/index.js"
- }
+ "types": "./dist/cjs/types/index.d.ts",
+ "import": "./dist/esm/index.js",
+ "require": "./dist/cjs/index.js",
+ "default": "./dist/cjs/index.js"
  },
- "./*": {
- "import": {
- "types": "./dist/*.d.ts",
- "default": "./dist/*.js"
- },
- "require": {
- "types": "./dist/*.d.ts",
- "default": "./dist/*.js"
- }
- }
+ "./package.json": "./package.json"
  },
  "typesVersions": {
  "*": {
  ".": [
- "./dist/index.d.ts"
- ],
- "*": [
- "./dist/*"
+ "./dist/cjs/types/index.d.ts"
  ]
  }
  },
@@ -64,7 +48,7 @@
  "@opentelemetry/instrumentation": "^0.203.0",
  "@traceloop/instrumentation-llamaindex": "^0.14.0",
  "llamaindex": "^0.11.13",
- "@blaxel/core": "0.2.36-dev.185"
+ "@blaxel/core": "0.2.36-dev.190"
  },
  "devDependencies": {
  "@eslint/js": "^9.30.1",
@@ -75,6 +59,10 @@
  "scripts": {
  "lint": "eslint src/",
  "dev": "tsc --watch",
- "build": "tsc"
+ "build": "npm run build:cjs && npm run build:esm && npm run build:fix-esm && npm run build:esm-package",
+ "build:cjs": "NODE_OPTIONS='--max-old-space-size=8192' tsc -p tsconfig.cjs.json",
+ "build:esm": "NODE_OPTIONS='--max-old-space-size=8192' tsc -p tsconfig.esm.json",
+ "build:fix-esm": "node fix-esm-imports.js",
+ "build:esm-package": "echo '{\"type\":\"module\"}' > dist/esm/package.json"
  }
  }
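
The reworked exports map gives the package dedicated ESM and CJS builds. A sketch of how each module system now resolves it (the two snippets belong in separate files; shown together for illustration):

    // In an ESM module: resolves via "import" -> ./dist/esm/index.js
    import { blModel } from "@blaxel/llamaindex";

    // In a CommonJS module: resolves via "require" -> ./dist/cjs/index.js
    const { blTools } = require("@blaxel/llamaindex");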
package/dist/telemetry.js DELETED
@@ -1,25 +0,0 @@
- "use strict";
- Object.defineProperty(exports, "__esModule", { value: true });
- const core_1 = require("@blaxel/core");
- const instrumentation_1 = require("@opentelemetry/instrumentation");
- // Safely initialize LlamaIndex telemetry instrumentation
- async function initializeTelemetry() {
- try {
- const { LlamaIndexInstrumentation } = await import("@traceloop/instrumentation-llamaindex");
- const llamaindex = new LlamaIndexInstrumentation();
- // Try to enable the instrumentation
- llamaindex.enable();
- (0, instrumentation_1.registerInstrumentations)({
- instrumentations: [llamaindex],
- });
- }
- catch (error) {
- // Log the error but don't crash the application
- core_1.logger.warn("LlamaIndex telemetry instrumentation failed to initialize:", error instanceof Error ? error.message : String(error));
- core_1.logger.warn("Continuing without LlamaIndex-specific telemetry...");
- }
- }
- // Initialize telemetry asynchronously
- initializeTelemetry().catch((error) => {
- core_1.logger.warn("Failed to initialize telemetry:", error instanceof Error ? error.message : String(error));
- });