@jarvis-agent/core 0.1.0 → 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.esm.js CHANGED
@@ -6113,7 +6113,7 @@ function withUserAgentSuffix(headers, ...userAgentSuffixParts) {
  }

  // src/version.ts
- var VERSION$5 = "3.0.12" ;
+ var VERSION$6 = "3.0.12" ;
  function loadApiKey$1({
  apiKey,
  environmentVariableName,
@@ -6462,7 +6462,7 @@ var postToApi$1 = async ({
  method: "POST",
  headers: withUserAgentSuffix(
  headers,
- `ai-sdk/provider-utils/${VERSION$5}`,
+ `ai-sdk/provider-utils/${VERSION$6}`,
  getRuntimeEnvironmentUserAgent()
  ),
  body: body.content,
@@ -12215,7 +12215,7 @@ var OpenAITranscriptionModel = class {
  };

  // src/version.ts
- var VERSION$4 = "2.0.52" ;
+ var VERSION$5 = "2.0.52" ;

  // src/openai-provider.ts
  function createOpenAI(options = {}) {
@@ -12238,7 +12238,7 @@ function createOpenAI(options = {}) {
  "OpenAI-Project": options.project,
  ...options.headers
  },
- `ai-sdk/openai/${VERSION$4}`
+ `ai-sdk/openai/${VERSION$5}`
  );
  const createChatModel = (modelId) => new OpenAIChatLanguageModel(modelId, {
  provider: `${providerName}.chat`,
@@ -12317,7 +12317,7 @@ createOpenAI();
  // src/anthropic-provider.ts

  // src/version.ts
- var VERSION$3 = "2.0.33" ;
+ var VERSION$4 = "2.0.33" ;
  var anthropicErrorDataSchema$1 = lazySchema(
  () => zodSchema(
  object({
@@ -15261,7 +15261,7 @@ function createAnthropic(options = {}) {
  }),
  ...options.headers
  },
- `ai-sdk/anthropic/${VERSION$3}`
+ `ai-sdk/anthropic/${VERSION$4}`
  );
  const createChatModel = (modelId) => {
  var _a2;
@@ -15301,7 +15301,7 @@ createAnthropic();
  // src/google-provider.ts

  // src/version.ts
- var VERSION$2 = "2.0.23" ;
+ var VERSION$3 = "2.0.23" ;
  var googleErrorDataSchema = lazySchema(
  () => zodSchema(
  object({
@@ -16790,7 +16790,7 @@ function createGoogleGenerativeAI(options = {}) {
  }),
  ...options.headers
  },
- `ai-sdk/google/${VERSION$2}`
+ `ai-sdk/google/${VERSION$3}`
  );
  const createChatModel = (modelId) => {
  var _a2;
@@ -18537,7 +18537,7 @@ function guessServiceRegion(url, headers) {
  // src/bedrock-provider.ts

  // src/version.ts
- var VERSION$1 = "3.0.43" ;
+ var VERSION$2 = "3.0.43" ;

  // src/bedrock-api-types.ts
  var BEDROCK_CACHE_POINT = {
@@ -19994,7 +19994,7 @@ function createSigV4FetchFunction(getCredentials, fetch = globalThis.fetch) {
  );
  const headersWithUserAgent = withUserAgentSuffix(
  originalHeaders,
- `ai-sdk/amazon-bedrock/${VERSION$1}`,
+ `ai-sdk/amazon-bedrock/${VERSION$2}`,
  getRuntimeEnvironmentUserAgent()
  );
  let effectiveBody = (_a = init == null ? void 0 : init.body) != null ? _a : void 0;
@@ -20051,7 +20051,7 @@ function createApiKeyFetchFunction(apiKey, fetch = globalThis.fetch) {
  const originalHeaders = extractHeaders(init == null ? void 0 : init.headers);
  const headersWithUserAgent = withUserAgentSuffix(
  originalHeaders,
- `ai-sdk/amazon-bedrock/${VERSION$1}`,
+ `ai-sdk/amazon-bedrock/${VERSION$2}`,
  getRuntimeEnvironmentUserAgent()
  );
  const finalHeaders = combineHeaders$1(headersWithUserAgent, {
@@ -20146,7 +20146,7 @@ Original error: ${errorMessage}`
  const getHeaders = () => {
  var _a;
  const baseHeaders = (_a = options.headers) != null ? _a : {};
- return withUserAgentSuffix(baseHeaders, `ai-sdk/amazon-bedrock/${VERSION$1}`);
+ return withUserAgentSuffix(baseHeaders, `ai-sdk/amazon-bedrock/${VERSION$2}`);
  };
  const createChatModel = (modelId) => new BedrockChatLanguageModel(modelId, {
  baseUrl: getBaseUrl,
@@ -23776,7 +23776,7 @@ var openaiCompatibleImageResponseSchema = object({
  });

  // src/version.ts
- var VERSION = "1.0.22" ;
+ var VERSION$1 = "1.0.22" ;

  // src/openai-compatible-provider.ts
  function createOpenAICompatible(options) {
@@ -23786,7 +23786,7 @@ function createOpenAICompatible(options) {
  ...options.apiKey && { Authorization: `Bearer ${options.apiKey}` },
  ...options.headers
  };
- const getHeaders = () => withUserAgentSuffix(headers, `ai-sdk/openai-compatible/${VERSION}`);
+ const getHeaders = () => withUserAgentSuffix(headers, `ai-sdk/openai-compatible/${VERSION$1}`);
  const getCommonModelConfig = (modelType) => ({
  provider: `${providerName}.${modelType}`,
  url: ({ path }) => {
@@ -23822,6 +23822,99 @@ function createOpenAICompatible(options) {
  return provider;
  }

+ // src/deepseek-provider.ts
+ var buildDeepseekMetadata = (usage) => {
+ var _a, _b;
+ return usage == null ? void 0 : {
+ deepseek: {
+ promptCacheHitTokens: (_a = usage.prompt_cache_hit_tokens) != null ? _a : NaN,
+ promptCacheMissTokens: (_b = usage.prompt_cache_miss_tokens) != null ? _b : NaN
+ }
+ };
+ };
+ var deepSeekMetadataExtractor = {
+ extractMetadata: async ({ parsedBody }) => {
+ const parsed = await safeValidateTypes$1({
+ value: parsedBody,
+ schema: deepSeekResponseSchema
+ });
+ return !parsed.success || parsed.value.usage == null ? void 0 : buildDeepseekMetadata(parsed.value.usage);
+ },
+ createStreamExtractor: () => {
+ let usage;
+ return {
+ processChunk: async (chunk) => {
+ var _a, _b;
+ const parsed = await safeValidateTypes$1({
+ value: chunk,
+ schema: deepSeekStreamChunkSchema
+ });
+ if (parsed.success && ((_b = (_a = parsed.value.choices) == null ? void 0 : _a[0]) == null ? void 0 : _b.finish_reason) === "stop" && parsed.value.usage) {
+ usage = parsed.value.usage;
+ }
+ },
+ buildMetadata: () => buildDeepseekMetadata(usage)
+ };
+ }
+ };
+ var deepSeekUsageSchema = object({
+ prompt_cache_hit_tokens: number$1().nullish(),
+ prompt_cache_miss_tokens: number$1().nullish()
+ });
+ var deepSeekResponseSchema = object({
+ usage: deepSeekUsageSchema.nullish()
+ });
+ var deepSeekStreamChunkSchema = object({
+ choices: array(
+ object({
+ finish_reason: string().nullish()
+ })
+ ).nullish(),
+ usage: deepSeekUsageSchema.nullish()
+ });
+
+ // src/version.ts
+ var VERSION = "1.0.23" ;
+
+ // src/deepseek-provider.ts
+ function createDeepSeek(options = {}) {
+ var _a;
+ const baseURL = withoutTrailingSlash$1(
+ (_a = options.baseURL) != null ? _a : "https://api.deepseek.com/v1"
+ );
+ const getHeaders = () => withUserAgentSuffix(
+ {
+ Authorization: `Bearer ${loadApiKey$1({
+ apiKey: options.apiKey,
+ environmentVariableName: "DEEPSEEK_API_KEY",
+ description: "DeepSeek API key"
+ })}`,
+ ...options.headers
+ },
+ `ai-sdk/deepseek/${VERSION}`
+ );
+ const createLanguageModel = (modelId) => {
+ return new OpenAICompatibleChatLanguageModel(modelId, {
+ provider: `deepseek.chat`,
+ url: ({ path }) => `${baseURL}${path}`,
+ headers: getHeaders,
+ fetch: options.fetch,
+ metadataExtractor: deepSeekMetadataExtractor
+ });
+ };
+ const provider = (modelId) => createLanguageModel(modelId);
+ provider.languageModel = createLanguageModel;
+ provider.chat = createLanguageModel;
+ provider.textEmbeddingModel = (modelId) => {
+ throw new NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
+ };
+ provider.imageModel = (modelId) => {
+ throw new NoSuchModelError({ modelId, modelType: "imageModel" });
+ };
+ return provider;
+ }
+ createDeepSeek();
+
  class Context {
  constructor(taskId, config, agents, chain) {
  this.conversation = [];
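
For context, a minimal usage sketch of the newly bundled createDeepSeek factory, based only on the code added above; the model id "deepseek-chat" and the call sites are illustrative assumptions, not part of this diff:

  // Hedged sketch exercising the factory from the hunk above.
  // "deepseek-chat" is an assumed example model id.
  const deepseek = createDeepSeek({
    // apiKey is optional; loadApiKey$1 falls back to the DEEPSEEK_API_KEY env var
    apiKey: process.env.DEEPSEEK_API_KEY,
    // baseURL defaults to "https://api.deepseek.com/v1"
  });
  const model = deepseek("deepseek-chat"); // same as deepseek.chat("deepseek-chat") or deepseek.languageModel(...)
  // deepseek.textEmbeddingModel(...) and deepseek.imageModel(...) throw NoSuchModelError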
@@ -30746,6 +30839,14 @@ class RetryLanguageModel {
  compatibility: llm.config?.compatibility,
  }).languageModel(llm.model);
  }
+ else if (llm.provider == "deepseek") {
+ return createDeepSeek({
+ apiKey: apiKey,
+ baseURL: baseURL,
+ fetch: llm.fetch,
+ headers: llm.config?.headers,
+ }).languageModel(llm.model);
+ }
  else {
  return llm.provider.languageModel(llm.model);
  }
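
For illustration, a hedged sketch of an llm entry that would take the new branch in RetryLanguageModel; apart from provider, model, fetch, and config.headers (the fields the branch actually reads, with apiKey and baseURL resolved earlier in the surrounding method), every value here is an assumed example:

  // Hypothetical llm entry; field values are examples, not taken from this diff.
  const llm = {
    provider: "deepseek",                      // string match selects createDeepSeek(...)
    model: "deepseek-chat",                    // assumed example model id
    fetch: undefined,                          // optional custom fetch implementation
    config: { headers: { "X-Example": "1" } }, // forwarded to createDeepSeek as headers
  };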