@hongymagic/q 2026.330.0 → 2026.402.0

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (2)
  1. package/dist/q.js +164 -37
  2. package/package.json +7 -7
package/dist/q.js CHANGED
@@ -14712,7 +14712,7 @@ import { parseArgs } from "node:util";
  // package.json
  var package_default = {
  name: "@hongymagic/q",
- version: "2026.330.0",
+ version: "2026.402.0",
  description: "Quick AI answers from the command line",
  main: "dist/q.js",
  type: "module",
@@ -14762,15 +14762,15 @@ var package_default = {
  "release:dry": "bun run scripts/release.ts --dry-run"
  },
  dependencies: {
- "@ai-sdk/amazon-bedrock": "4.0.83",
+ "@ai-sdk/amazon-bedrock": "4.0.85",
  "@ai-sdk/anthropic": "3.0.64",
- "@ai-sdk/azure": "3.0.49",
- "@ai-sdk/google": "3.0.53",
+ "@ai-sdk/azure": "3.0.50",
+ "@ai-sdk/google": "3.0.54",
  "@ai-sdk/groq": "3.0.31",
- "@ai-sdk/openai": "3.0.48",
+ "@ai-sdk/openai": "3.0.49",
  "@ai-sdk/openai-compatible": "2.0.37",
  "@t3-oss/env-core": "0.13.11",
- ai: "6.0.140",
+ ai: "6.0.142",
  clipboardy: "5.3.1",
  "env-paths": "4.0.0",
  "ollama-ai-provider-v2": "3.5.0",
@@ -14778,7 +14778,7 @@ var package_default = {
  zod: "4.3.6"
  },
  devDependencies: {
- "@biomejs/biome": "2.4.9",
+ "@biomejs/biome": "2.4.10",
  "@types/bun": "1.3.11",
  "@vitest/coverage-v8": "4.1.2",
  lefthook: "2.1.4",
@@ -43792,6 +43792,11 @@ async function convertToOpenAIResponsesInput({
  filename: (_a23 = item.filename) != null ? _a23 : "data",
  file_data: `data:${item.mediaType};base64,${item.data}`
  };
+ case "file-url":
+ return {
+ type: "input_file",
+ file_url: item.url
+ };
  default:
  warnings.push({
  type: "other",
@@ -43850,6 +43855,12 @@ async function convertToOpenAIResponsesInput({
  file_data: `data:${item.mediaType};base64,${item.data}`
  };
  }
+ case "file-url": {
+ return {
+ type: "input_file",
+ file_url: item.url
+ };
+ }
  default: {
  warnings.push({
  type: "other",
@@ -46918,7 +46929,7 @@ var azureOpenaiTools = {
  imageGeneration,
  webSearchPreview
  };
- var VERSION4 = "3.0.49";
+ var VERSION4 = "3.0.50";
  function createAzure(options = {}) {
  var _a16;
  const getHeaders = () => {
@@ -49846,25 +49857,31 @@ async function convertToBedrockChatMessages(prompt, isMistral = false) {
  providerOptions: part.providerOptions,
  schema: bedrockReasoningMetadataSchema
  });
- if (reasoningMetadata != null) {
- if (reasoningMetadata.signature != null) {
- bedrockContent.push({
- reasoningContent: {
- reasoningText: {
- text: trimIfLast(isLastBlock, isLastMessage, isLastContentPart, part.text),
- signature: reasoningMetadata.signature
- }
+ if ((reasoningMetadata == null ? undefined : reasoningMetadata.signature) != null) {
+ bedrockContent.push({
+ reasoningContent: {
+ reasoningText: {
+ text: part.text,
+ signature: reasoningMetadata.signature
  }
- });
- } else if (reasoningMetadata.redactedData != null) {
- bedrockContent.push({
- reasoningContent: {
- redactedReasoning: {
- data: reasoningMetadata.redactedData
- }
+ }
+ });
+ } else if ((reasoningMetadata == null ? undefined : reasoningMetadata.redactedData) != null) {
+ bedrockContent.push({
+ reasoningContent: {
+ redactedReasoning: {
+ data: reasoningMetadata.redactedData
  }
- });
- }
+ }
+ });
+ } else {
+ bedrockContent.push({
+ reasoningContent: {
+ reasoningText: {
+ text: trimIfLast(isLastBlock, isLastMessage, isLastContentPart, part.text)
+ }
+ }
+ });
  }
  break;
  }
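
This Bedrock hunk restructures reasoning-part conversion in two ways: signed reasoning text is now sent verbatim (`part.text`) rather than trimmed, presumably because altering signed text would invalidate the signature, and reasoning parts carrying neither a signature nor redacted data, previously dropped, now fall through to a new `else` branch that emits plain `reasoningText`. A sketch of the three `reasoningContent` shapes the converter now produces (field names as in the diff; the literal values are illustrative):

```ts
// Signed reasoning: text is forwarded untrimmed alongside its signature.
const signed = {
  reasoningContent: {
    reasoningText: { text: "…model reasoning…", signature: "sig-abc" }, // illustrative values
  },
};

// Redacted reasoning: opaque provider data is passed through unchanged.
const redacted = {
  reasoningContent: {
    redactedReasoning: { data: "opaque-base64-blob" },
  },
};

// New in this release: unsigned reasoning is emitted (still trimmed when it
// is the final block) instead of being silently dropped.
const unsigned = {
  reasoningContent: {
    reasoningText: { text: "…model reasoning…" },
  },
};
```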
@@ -51081,7 +51098,7 @@ var bedrockImageResponseSchema = exports_external.object({
  details: exports_external.record(exports_external.string(), exports_external.unknown()).optional(),
  preview: exports_external.unknown().optional()
  });
- var VERSION5 = "4.0.83";
+ var VERSION5 = "4.0.85";
  function createSigV4FetchFunction(getCredentials, fetch2 = globalThis.fetch) {
  return async (input, init) => {
  var _a16, _b16;
@@ -51377,7 +51394,7 @@ function createBedrockProvider(config2, providerName) {
  }

  // node_modules/@ai-sdk/google/dist/index.mjs
- var VERSION6 = "3.0.53";
+ var VERSION6 = "3.0.54";
  var googleErrorDataSchema = lazySchema(() => zodSchema(exports_external.object({
  error: exports_external.object({
  code: exports_external.number().nullable(),
@@ -51971,7 +51988,12 @@ var googleLanguageModelOptions = lazySchema(() => zodSchema(exports_external.obj
  latitude: exports_external.number(),
  longitude: exports_external.number()
  }).optional()
- }).optional()
+ }).optional(),
+ serviceTier: exports_external.enum([
+ "SERVICE_TIER_STANDARD",
+ "SERVICE_TIER_FLEX",
+ "SERVICE_TIER_PRIORITY"
+ ]).optional()
  })));
  function prepareTools4({
  tools,
@@ -52305,14 +52327,15 @@ var GoogleGenerativeAILanguageModel = class {
  retrievalConfig: googleOptions.retrievalConfig
  } : googleToolConfig,
  cachedContent: googleOptions == null ? undefined : googleOptions.cachedContent,
- labels: googleOptions == null ? undefined : googleOptions.labels
+ labels: googleOptions == null ? undefined : googleOptions.labels,
+ serviceTier: googleOptions == null ? undefined : googleOptions.serviceTier
  },
  warnings: [...warnings, ...toolWarnings],
  providerOptionsName
  };
  }
  async doGenerate(options) {
- var _a16, _b16, _c, _d, _e, _f, _g, _h, _i, _j, _k;
+ var _a16, _b16, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
  const { args, warnings, providerOptionsName } = await this.getArgs(options);
  const mergedHeaders = combineHeaders(await resolve(this.config.headers), options.headers);
  const {
@@ -52426,7 +52449,8 @@ var GoogleGenerativeAILanguageModel = class {
  urlContextMetadata: (_i = candidate.urlContextMetadata) != null ? _i : null,
  safetyRatings: (_j = candidate.safetyRatings) != null ? _j : null,
  usageMetadata: usageMetadata != null ? usageMetadata : null,
- finishMessage: (_k = candidate.finishMessage) != null ? _k : null
+ finishMessage: (_k = candidate.finishMessage) != null ? _k : null,
+ serviceTier: (_l = response.serviceTier) != null ? _l : null
  }
  },
  request: { body: args },
@@ -52456,6 +52480,7 @@ var GoogleGenerativeAILanguageModel = class {
  let providerMetadata = undefined;
  let lastGroundingMetadata = null;
  let lastUrlContextMetadata = null;
+ let serviceTier = null;
  const generateId3 = this.config.generateId;
  let hasToolCalls = false;
  let currentTextBlockId = null;
@@ -52482,6 +52507,9 @@ var GoogleGenerativeAILanguageModel = class {
  if (usageMetadata != null) {
  usage = usageMetadata;
  }
+ if (value.serviceTier != null) {
+ serviceTier = value.serviceTier;
+ }
  const candidate = (_a16 = value.candidates) == null ? undefined : _a16[0];
  if (candidate == null) {
  return;
@@ -52673,7 +52701,8 @@ var GoogleGenerativeAILanguageModel = class {
  urlContextMetadata: lastUrlContextMetadata,
  safetyRatings: (_f = candidate.safetyRatings) != null ? _f : null,
  usageMetadata: usageMetadata != null ? usageMetadata : null,
- finishMessage: (_g = candidate.finishMessage) != null ? _g : null
+ finishMessage: (_g = candidate.finishMessage) != null ? _g : null,
+ serviceTier
  }
  };
  }
@@ -52926,7 +52955,8 @@ var responseSchema = lazySchema(() => zodSchema(exports_external.object({
  promptFeedback: exports_external.object({
  blockReason: exports_external.string().nullish(),
  safetyRatings: exports_external.array(getSafetyRatingSchema()).nullish()
- }).nullish()
+ }).nullish(),
+ serviceTier: exports_external.string().nullish()
  })));
  var chunkSchema = lazySchema(() => zodSchema(exports_external.object({
  candidates: exports_external.array(exports_external.object({
@@ -52941,7 +52971,8 @@ var chunkSchema = lazySchema(() => zodSchema(exports_external.object({
  promptFeedback: exports_external.object({
  blockReason: exports_external.string().nullish(),
  safetyRatings: exports_external.array(getSafetyRatingSchema()).nullish()
- }).nullish()
+ }).nullish(),
+ serviceTier: exports_external.string().nullish()
  })));
  var codeExecution = createProviderToolFactoryWithOutputSchema({
  id: "google.code_execution",
@@ -58317,6 +58348,11 @@ async function convertToOpenAIResponsesInput2({
  filename: (_a23 = item.filename) != null ? _a23 : "data",
  file_data: `data:${item.mediaType};base64,${item.data}`
  };
+ case "file-url":
+ return {
+ type: "input_file",
+ file_url: item.url
+ };
  default:
  warnings.push({
  type: "other",
@@ -58375,6 +58411,12 @@ async function convertToOpenAIResponsesInput2({
  file_data: `data:${item.mediaType};base64,${item.data}`
  };
  }
+ case "file-url": {
+ return {
+ type: "input_file",
+ file_url: item.url
+ };
+ }
  default: {
  warnings.push({
  type: "other",
@@ -61504,7 +61546,7 @@ var OpenAITranscriptionModel2 = class {
  };
  }
  };
- var VERSION8 = "3.0.48";
+ var VERSION8 = "3.0.49";
  function createOpenAI(options = {}) {
  var _a16, _b16;
  const baseURL = (_a16 = withoutTrailingSlash(loadOptionalSetting({
@@ -63840,6 +63882,81 @@ var gatewaySpendReportResponseSchema = lazySchema(() => zodSchema(exports_extern
  ...request_count !== undefined ? { requestCount: request_count } : {}
  })))
  })));
+ var GatewayGenerationInfoFetcher = class {
+ constructor(config2) {
+ this.config = config2;
+ }
+ async getGenerationInfo(params) {
+ try {
+ const baseUrl = new URL(this.config.baseURL);
+ const { value } = await getFromApi({
+ url: `${baseUrl.origin}/v1/generation?id=${encodeURIComponent(params.id)}`,
+ headers: await resolve(this.config.headers()),
+ successfulResponseHandler: createJsonResponseHandler(gatewayGenerationInfoResponseSchema),
+ failedResponseHandler: createJsonErrorResponseHandler({
+ errorSchema: exports_external.any(),
+ errorToMessage: (data) => data
+ }),
+ fetch: this.config.fetch
+ });
+ return value;
+ } catch (error48) {
+ throw await asGatewayError(error48);
+ }
+ }
+ };
+ var gatewayGenerationInfoResponseSchema = lazySchema(() => zodSchema(exports_external.object({
+ data: exports_external.object({
+ id: exports_external.string(),
+ total_cost: exports_external.number(),
+ upstream_inference_cost: exports_external.number(),
+ usage: exports_external.number(),
+ created_at: exports_external.string(),
+ model: exports_external.string(),
+ is_byok: exports_external.boolean(),
+ provider_name: exports_external.string(),
+ streamed: exports_external.boolean(),
+ finish_reason: exports_external.string(),
+ latency: exports_external.number(),
+ generation_time: exports_external.number(),
+ native_tokens_prompt: exports_external.number(),
+ native_tokens_completion: exports_external.number(),
+ native_tokens_reasoning: exports_external.number(),
+ native_tokens_cached: exports_external.number(),
+ native_tokens_cache_creation: exports_external.number(),
+ billable_web_search_calls: exports_external.number()
+ }).transform(({
+ total_cost,
+ upstream_inference_cost,
+ created_at,
+ is_byok,
+ provider_name,
+ finish_reason,
+ generation_time,
+ native_tokens_prompt,
+ native_tokens_completion,
+ native_tokens_reasoning,
+ native_tokens_cached,
+ native_tokens_cache_creation,
+ billable_web_search_calls,
+ ...rest
+ }) => ({
+ ...rest,
+ totalCost: total_cost,
+ upstreamInferenceCost: upstream_inference_cost,
+ createdAt: created_at,
+ isByok: is_byok,
+ providerName: provider_name,
+ finishReason: finish_reason,
+ generationTime: generation_time,
+ promptTokens: native_tokens_prompt,
+ completionTokens: native_tokens_completion,
+ reasoningTokens: native_tokens_reasoning,
+ cachedTokens: native_tokens_cached,
+ cacheCreationTokens: native_tokens_cache_creation,
+ billableWebSearchCalls: billable_web_search_calls
+ }))
+ }).transform(({ data }) => data)));
  var GatewayLanguageModel = class {
  constructor(modelId, config2) {
  this.modelId = modelId;
@@ -64439,7 +64556,7 @@ async function getVercelRequestId() {
  var _a92;
  return (_a92 = import_oidc.getContext().headers) == null ? undefined : _a92["x-vercel-id"];
  }
- var VERSION10 = "3.0.82";
+ var VERSION10 = "3.0.84";
  var AI_GATEWAY_PROTOCOL_VERSION = "0.0.1";
  function createGatewayProvider(options = {}) {
  var _a92, _b92;
@@ -64539,6 +64656,15 @@ function createGatewayProvider(options = {}) {
  throw await asGatewayError(error48, await parseAuthMethod(await getHeaders()));
  });
  };
+ const getGenerationInfo = async (params) => {
+ return new GatewayGenerationInfoFetcher({
+ baseURL,
+ headers: getHeaders,
+ fetch: options.fetch
+ }).getGenerationInfo(params).catch(async (error48) => {
+ throw await asGatewayError(error48, await parseAuthMethod(await getHeaders()));
+ });
+ };
  const provider = function(modelId) {
  if (new.target) {
  throw new Error("The Gateway Provider model function cannot be called with the new keyword.");
@@ -64549,6 +64675,7 @@ function createGatewayProvider(options = {}) {
  provider.getAvailableModels = getAvailableModels;
  provider.getCredits = getCredits;
  provider.getSpendReport = getSpendReport;
+ provider.getGenerationInfo = getGenerationInfo;
  provider.imageModel = (modelId) => {
  return new GatewayImageModel(modelId, {
  provider: "gateway",
@@ -65197,7 +65324,7 @@ function detectMediaType({
  }
  return;
  }
- var VERSION11 = "6.0.140";
+ var VERSION11 = "6.0.142";
  var download = async ({
  url: url2,
  maxBytes,
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@hongymagic/q",
- "version": "2026.330.0",
+ "version": "2026.402.0",
  "description": "Quick AI answers from the command line",
  "main": "dist/q.js",
  "type": "module",
@@ -50,15 +50,15 @@
  "release:dry": "bun run scripts/release.ts --dry-run"
  },
  "dependencies": {
- "@ai-sdk/amazon-bedrock": "4.0.83",
+ "@ai-sdk/amazon-bedrock": "4.0.85",
  "@ai-sdk/anthropic": "3.0.64",
- "@ai-sdk/azure": "3.0.49",
- "@ai-sdk/google": "3.0.53",
+ "@ai-sdk/azure": "3.0.50",
+ "@ai-sdk/google": "3.0.54",
  "@ai-sdk/groq": "3.0.31",
- "@ai-sdk/openai": "3.0.48",
+ "@ai-sdk/openai": "3.0.49",
  "@ai-sdk/openai-compatible": "2.0.37",
  "@t3-oss/env-core": "0.13.11",
- "ai": "6.0.140",
+ "ai": "6.0.142",
  "clipboardy": "5.3.1",
  "env-paths": "4.0.0",
  "ollama-ai-provider-v2": "3.5.0",
@@ -66,7 +66,7 @@
  "zod": "4.3.6"
  },
  "devDependencies": {
- "@biomejs/biome": "2.4.9",
+ "@biomejs/biome": "2.4.10",
  "@types/bun": "1.3.11",
  "@vitest/coverage-v8": "4.1.2",
  "lefthook": "2.1.4",