@jarvis-agent/core 0.1.0 → 0.1.2

This diff compares publicly available package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions exactly as they appear in their respective public registries.
package/dist/index.cjs.js CHANGED
@@ -6119,7 +6119,7 @@ function withUserAgentSuffix(headers, ...userAgentSuffixParts) {
  }

  // src/version.ts
- var VERSION$5 = "3.0.12" ;
+ var VERSION$6 = "3.0.12" ;
  function loadApiKey$1({
  apiKey,
  environmentVariableName,
@@ -6468,7 +6468,7 @@ var postToApi$1 = async ({
  method: "POST",
  headers: withUserAgentSuffix(
  headers,
- `ai-sdk/provider-utils/${VERSION$5}`,
+ `ai-sdk/provider-utils/${VERSION$6}`,
  getRuntimeEnvironmentUserAgent()
  ),
  body: body.content,
@@ -12221,7 +12221,7 @@ var OpenAITranscriptionModel = class {
  };

  // src/version.ts
- var VERSION$4 = "2.0.52" ;
+ var VERSION$5 = "2.0.52" ;

  // src/openai-provider.ts
  function createOpenAI(options = {}) {
@@ -12244,7 +12244,7 @@ function createOpenAI(options = {}) {
  "OpenAI-Project": options.project,
  ...options.headers
  },
- `ai-sdk/openai/${VERSION$4}`
+ `ai-sdk/openai/${VERSION$5}`
  );
  const createChatModel = (modelId) => new OpenAIChatLanguageModel(modelId, {
  provider: `${providerName}.chat`,
@@ -12323,7 +12323,7 @@ createOpenAI();
  // src/anthropic-provider.ts

  // src/version.ts
- var VERSION$3 = "2.0.33" ;
+ var VERSION$4 = "2.0.33" ;
  var anthropicErrorDataSchema$1 = lazySchema(
  () => zodSchema(
  object({
@@ -15267,7 +15267,7 @@ function createAnthropic(options = {}) {
  }),
  ...options.headers
  },
- `ai-sdk/anthropic/${VERSION$3}`
+ `ai-sdk/anthropic/${VERSION$4}`
  );
  const createChatModel = (modelId) => {
  var _a2;
@@ -15307,7 +15307,7 @@ createAnthropic();
  // src/google-provider.ts

  // src/version.ts
- var VERSION$2 = "2.0.23" ;
+ var VERSION$3 = "2.0.23" ;
  var googleErrorDataSchema = lazySchema(
  () => zodSchema(
  object({
@@ -16796,7 +16796,7 @@ function createGoogleGenerativeAI(options = {}) {
  }),
  ...options.headers
  },
- `ai-sdk/google/${VERSION$2}`
+ `ai-sdk/google/${VERSION$3}`
  );
  const createChatModel = (modelId) => {
  var _a2;
@@ -18572,7 +18572,7 @@ function guessServiceRegion(url, headers) {
  // src/bedrock-provider.ts

  // src/version.ts
- var VERSION$1 = "3.0.43" ;
+ var VERSION$2 = "3.0.43" ;

  // src/bedrock-api-types.ts
  var BEDROCK_CACHE_POINT = {
@@ -20029,7 +20029,7 @@ function createSigV4FetchFunction(getCredentials, fetch = globalThis.fetch) {
  );
  const headersWithUserAgent = withUserAgentSuffix(
  originalHeaders,
- `ai-sdk/amazon-bedrock/${VERSION$1}`,
+ `ai-sdk/amazon-bedrock/${VERSION$2}`,
  getRuntimeEnvironmentUserAgent()
  );
  let effectiveBody = (_a = init == null ? void 0 : init.body) != null ? _a : void 0;
@@ -20086,7 +20086,7 @@ function createApiKeyFetchFunction(apiKey, fetch = globalThis.fetch) {
  const originalHeaders = extractHeaders(init == null ? void 0 : init.headers);
  const headersWithUserAgent = withUserAgentSuffix(
  originalHeaders,
- `ai-sdk/amazon-bedrock/${VERSION$1}`,
+ `ai-sdk/amazon-bedrock/${VERSION$2}`,
  getRuntimeEnvironmentUserAgent()
  );
  const finalHeaders = combineHeaders$1(headersWithUserAgent, {
@@ -20181,7 +20181,7 @@ Original error: ${errorMessage}`
  const getHeaders = () => {
  var _a;
  const baseHeaders = (_a = options.headers) != null ? _a : {};
- return withUserAgentSuffix(baseHeaders, `ai-sdk/amazon-bedrock/${VERSION$1}`);
+ return withUserAgentSuffix(baseHeaders, `ai-sdk/amazon-bedrock/${VERSION$2}`);
  };
  const createChatModel = (modelId) => new BedrockChatLanguageModel(modelId, {
  baseUrl: getBaseUrl,
@@ -23811,7 +23811,7 @@ var openaiCompatibleImageResponseSchema = object({
  });

  // src/version.ts
- var VERSION = "1.0.22" ;
+ var VERSION$1 = "1.0.22" ;

  // src/openai-compatible-provider.ts
  function createOpenAICompatible(options) {
@@ -23821,7 +23821,7 @@ function createOpenAICompatible(options) {
  ...options.apiKey && { Authorization: `Bearer ${options.apiKey}` },
  ...options.headers
  };
- const getHeaders = () => withUserAgentSuffix(headers, `ai-sdk/openai-compatible/${VERSION}`);
+ const getHeaders = () => withUserAgentSuffix(headers, `ai-sdk/openai-compatible/${VERSION$1}`);
  const getCommonModelConfig = (modelType) => ({
  provider: `${providerName}.${modelType}`,
  url: ({ path }) => {
@@ -23857,6 +23857,99 @@ function createOpenAICompatible(options) {
  return provider;
  }

+ // src/deepseek-provider.ts
+ var buildDeepseekMetadata = (usage) => {
+ var _a, _b;
+ return usage == null ? void 0 : {
+ deepseek: {
+ promptCacheHitTokens: (_a = usage.prompt_cache_hit_tokens) != null ? _a : NaN,
+ promptCacheMissTokens: (_b = usage.prompt_cache_miss_tokens) != null ? _b : NaN
+ }
+ };
+ };
+ var deepSeekMetadataExtractor = {
+ extractMetadata: async ({ parsedBody }) => {
+ const parsed = await safeValidateTypes$1({
+ value: parsedBody,
+ schema: deepSeekResponseSchema
+ });
+ return !parsed.success || parsed.value.usage == null ? void 0 : buildDeepseekMetadata(parsed.value.usage);
+ },
+ createStreamExtractor: () => {
+ let usage;
+ return {
+ processChunk: async (chunk) => {
+ var _a, _b;
+ const parsed = await safeValidateTypes$1({
+ value: chunk,
+ schema: deepSeekStreamChunkSchema
+ });
+ if (parsed.success && ((_b = (_a = parsed.value.choices) == null ? void 0 : _a[0]) == null ? void 0 : _b.finish_reason) === "stop" && parsed.value.usage) {
+ usage = parsed.value.usage;
+ }
+ },
+ buildMetadata: () => buildDeepseekMetadata(usage)
+ };
+ }
+ };
+ var deepSeekUsageSchema = object({
+ prompt_cache_hit_tokens: number$1().nullish(),
+ prompt_cache_miss_tokens: number$1().nullish()
+ });
+ var deepSeekResponseSchema = object({
+ usage: deepSeekUsageSchema.nullish()
+ });
+ var deepSeekStreamChunkSchema = object({
+ choices: array(
+ object({
+ finish_reason: string().nullish()
+ })
+ ).nullish(),
+ usage: deepSeekUsageSchema.nullish()
+ });
+
+ // src/version.ts
+ var VERSION = "1.0.23" ;
+
+ // src/deepseek-provider.ts
+ function createDeepSeek(options = {}) {
+ var _a;
+ const baseURL = withoutTrailingSlash$1(
+ (_a = options.baseURL) != null ? _a : "https://api.deepseek.com/v1"
+ );
+ const getHeaders = () => withUserAgentSuffix(
+ {
+ Authorization: `Bearer ${loadApiKey$1({
+ apiKey: options.apiKey,
+ environmentVariableName: "DEEPSEEK_API_KEY",
+ description: "DeepSeek API key"
+ })}`,
+ ...options.headers
+ },
+ `ai-sdk/deepseek/${VERSION}`
+ );
+ const createLanguageModel = (modelId) => {
+ return new OpenAICompatibleChatLanguageModel(modelId, {
+ provider: `deepseek.chat`,
+ url: ({ path }) => `${baseURL}${path}`,
+ headers: getHeaders,
+ fetch: options.fetch,
+ metadataExtractor: deepSeekMetadataExtractor
+ });
+ };
+ const provider = (modelId) => createLanguageModel(modelId);
+ provider.languageModel = createLanguageModel;
+ provider.chat = createLanguageModel;
+ provider.textEmbeddingModel = (modelId) => {
+ throw new NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
+ };
+ provider.imageModel = (modelId) => {
+ throw new NoSuchModelError({ modelId, modelType: "imageModel" });
+ };
+ return provider;
+ }
+ createDeepSeek();
+
  class Context {
  constructor(taskId, config, agents, chain) {
  this.conversation = [];
@@ -30781,6 +30874,14 @@ class RetryLanguageModel {
  compatibility: llm.config?.compatibility,
  }).languageModel(llm.model);
  }
+ else if (llm.provider == "deepseek") {
+ return createDeepSeek({
+ apiKey: apiKey,
+ baseURL: baseURL,
+ fetch: llm.fetch,
+ headers: llm.config?.headers,
+ }).languageModel(llm.model);
+ }
  else {
  return llm.provider.languageModel(llm.model);
  }
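
Note (not part of the published diff): the hunks above bundle a DeepSeek provider factory, `createDeepSeek`, and route `llm.provider == "deepseek"` through it in `RetryLanguageModel`. A minimal sketch of exercising the bundled factory directly is shown below. It assumes `createDeepSeek` is reachable from the package's public entry point and uses the illustrative model id "deepseek-chat"; neither assumption is confirmed by this diff.

// Sketch only, based on the createDeepSeek definition added in this version.
// Assumption: the factory is re-exported by @jarvis-agent/core.
const { createDeepSeek } = require("@jarvis-agent/core");

const deepseek = createDeepSeek({
  apiKey: process.env.DEEPSEEK_API_KEY, // if omitted, loadApiKey$1 falls back to the DEEPSEEK_API_KEY env var
  baseURL: "https://api.deepseek.com/v1", // default baseURL shown in the diff
});

const model = deepseek("deepseek-chat");                   // the provider object is itself callable
const sameModel = deepseek.languageModel("deepseek-chat"); // equivalent lookup via .languageModel / .chat
// deepseek.textEmbeddingModel(...) and deepseek.imageModel(...) throw NoSuchModelError,
// as defined in the added code.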