@blaxel/llamaindex 0.2.59-preview.41 → 0.2.59-preview.43

This diff shows the content of publicly available package versions that have been released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
package/dist/esm/model.js CHANGED
@@ -13,7 +13,7 @@ class BlaxelLLM {
13
13
  this.model = model;
14
14
  this.modelData = modelData;
15
15
  this.options = options;
16
- this.type = modelData?.spec?.runtime?.type || "openai";
16
+ this.type = modelData?.spec.runtime?.type || "openai";
17
17
  }
18
18
  get supportToolCall() {
19
19
  return true;
@@ -43,7 +43,7 @@ class BlaxelLLM {
43
43
  catch {
44
44
  // If metadata access fails (e.g., Gemini), use default metadata
45
45
  this._metadata = {
46
- model: this.modelData?.spec?.runtime?.model || this.model,
46
+ model: this.modelData?.spec.runtime?.model || this.model,
47
47
  temperature: this.options?.temperature ?? 0,
48
48
  topP: this.options?.topP ?? 1,
49
49
  maxTokens: this.options?.maxTokens ?? undefined,
@@ -93,7 +93,7 @@ class BlaxelLLM {
93
93
  };
94
94
  if (this.type === "mistral") {
95
95
  return openai({
96
- model: this.modelData?.spec?.runtime?.model,
96
+ model: this.modelData?.spec.runtime?.model,
97
97
  apiKey: currentToken,
98
98
  baseURL: `${url}/v1`,
99
99
  additionalSessionOptions: {
@@ -109,7 +109,7 @@ class BlaxelLLM {
109
109
  // Get fresh headers right before creating the session
110
110
  const anthropicHeaders = { ...settings.headers };
111
111
  const llm = anthropic({
112
- model: this.modelData?.spec?.runtime?.model,
112
+ model: this.modelData?.spec.runtime?.model,
113
113
  session: new AnthropicSession({
114
114
  baseURL: url,
115
115
  defaultHeaders: anthropicHeaders,
@@ -170,7 +170,7 @@ class BlaxelLLM {
170
170
  }
171
171
  if (this.type === "cohere") {
172
172
  const llm = openai({
173
- model: this.modelData?.spec?.runtime?.model,
173
+ model: this.modelData?.spec.runtime?.model,
174
174
  apiKey: currentToken,
175
175
  baseURL: `${url}/compatibility/v1`, // OpenAI compatibility endpoint
176
176
  additionalSessionOptions: {
@@ -187,7 +187,7 @@ class BlaxelLLM {
187
187
  process.env.GOOGLE_API_KEY = process.env.GOOGLE_API_KEY || "THIS_IS_A_DUMMY_KEY_FOR_LLAMAINDEX";
188
188
  const llm = new Gemini({
189
189
  apiKey: settings.token,
190
- model: this.modelData?.spec?.runtime?.model,
190
+ model: this.modelData?.spec.runtime?.model,
191
191
  httpOptions: {
192
192
  baseUrl: url,
193
193
  headers: settings.headers,
@@ -197,7 +197,7 @@ class BlaxelLLM {
197
197
  return llm;
198
198
  }
199
199
  return openai({
200
- model: this.modelData?.spec?.runtime?.model,
200
+ model: this.modelData?.spec.runtime?.model,
201
201
  apiKey: currentToken,
202
202
  baseURL: `${url}/v1`,
203
203
  additionalSessionOptions: {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@blaxel/llamaindex",
3
- "version": "0.2.59-preview.41",
3
+ "version": "0.2.59-preview.43",
4
4
  "description": "Blaxel SDK for TypeScript",
5
5
  "license": "MIT",
6
6
  "author": "Blaxel, INC (https://blaxel.ai)",
@@ -48,7 +48,7 @@
48
48
  "@opentelemetry/instrumentation": "^0.203.0",
49
49
  "@traceloop/instrumentation-llamaindex": "^0.14.0",
50
50
  "llamaindex": "^0.12.0",
51
- "@blaxel/core": "0.2.59-preview.41"
51
+ "@blaxel/core": "0.2.59-preview.43"
52
52
  },
53
53
  "devDependencies": {
54
54
  "@eslint/js": "^9.30.1",