@hongymagic/q 0.6.0 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +2 -1
  2. package/dist/q.js +63 -33
  3. package/package.json +18 -18
package/README.md CHANGED
@@ -60,6 +60,7 @@ echo "how do I restart docker" | q
60
60
  | `-p, --provider <name>` | Override the default provider |
61
61
  | `-m, --model <id>` | Override the default model |
62
62
  | `--copy` | Copy answer to clipboard |
63
+ | `--no-copy` | Disable copy (overrides config) |
63
64
  | `-h, --help` | Show help message |
64
65
 
65
66
  ### Commands
@@ -76,7 +77,7 @@ Config is loaded from (later overrides earlier):
76
77
 
77
78
  1. `~/.config/q/config.toml`
78
79
  2. `./config.toml` (project-specific)
79
- 3. Environment: `Q_PROVIDER`, `Q_MODEL`
80
+ 3. Environment: `Q_PROVIDER`, `Q_MODEL`, `Q_COPY`
80
81
 
81
82
  See [config.example.toml](config.example.toml) for all options.
82
83
 
package/dist/q.js CHANGED
@@ -14522,7 +14522,7 @@ import { parseArgs } from "node:util";
14522
14522
  // package.json
14523
14523
  var package_default = {
14524
14524
  name: "@hongymagic/q",
14525
- version: "0.6.0",
14525
+ version: "0.7.0",
14526
14526
  description: "Quick AI answers from the command line",
14527
14527
  main: "dist/q.js",
14528
14528
  type: "module",
@@ -14569,25 +14569,25 @@ var package_default = {
14569
14569
  "build:windows-x64": "bun build --compile --minify --target=bun-windows-x64 ./src/cli.ts --outfile dist/q-windows-x64.exe"
14570
14570
  },
14571
14571
  dependencies: {
14572
- "@ai-sdk/amazon-bedrock": "^4.0.64",
14573
- "@ai-sdk/anthropic": "^3.0.46",
14574
- "@ai-sdk/azure": "^3.0.34",
14575
- "@ai-sdk/google": "^3.0.31",
14576
- "@ai-sdk/groq": "^3.0.24",
14577
- "@ai-sdk/openai": "^3.0.31",
14578
- "@ai-sdk/openai-compatible": "^2.0.30",
14579
- "@t3-oss/env-core": "^0.13.10",
14580
- ai: "^6.0.97",
14581
- clipboardy: "^5.3.0",
14582
- "ollama-ai-provider-v2": "^3.3.1",
14583
- zod: "^4.3.6"
14572
+ "@ai-sdk/amazon-bedrock": "4.0.64",
14573
+ "@ai-sdk/anthropic": "3.0.47",
14574
+ "@ai-sdk/azure": "3.0.34",
14575
+ "@ai-sdk/google": "3.0.31",
14576
+ "@ai-sdk/groq": "3.0.24",
14577
+ "@ai-sdk/openai": "3.0.33",
14578
+ "@ai-sdk/openai-compatible": "2.0.30",
14579
+ "@t3-oss/env-core": "0.13.10",
14580
+ ai: "6.0.99",
14581
+ clipboardy: "5.3.0",
14582
+ "ollama-ai-provider-v2": "3.3.1",
14583
+ zod: "4.3.6"
14584
14584
  },
14585
14585
  devDependencies: {
14586
- "@biomejs/biome": "^2.4.4",
14587
- "@types/bun": "latest",
14588
- lefthook: "^2.1.1",
14589
- typescript: "^5.9.3",
14590
- vitest: "^4.0.18"
14586
+ "@biomejs/biome": "2.4.4",
14587
+ "@types/bun": "1.3.9",
14588
+ lefthook: "2.1.1",
14589
+ typescript: "5.9.3",
14590
+ vitest: "4.0.18"
14591
14591
  }
14592
14592
  };
14593
14593
 
@@ -14686,6 +14686,7 @@ OPTIONS:
14686
14686
  -p, --provider <name> Override the default provider
14687
14687
  -m, --model <id> Override the default model
14688
14688
  --copy Copy answer to clipboard
14689
+ --no-copy Disable copy (overrides config)
14689
14690
  --debug Enable debug logging to stderr
14690
14691
  -h, --help Show this help message
14691
14692
  -v, --version Show version
@@ -14693,12 +14694,13 @@ OPTIONS:
14693
14694
  ENVIRONMENT:
14694
14695
  Q_PROVIDER Override default provider
14695
14696
  Q_MODEL Override default model
14697
+ Q_COPY Override default copy behaviour (true/false)
14696
14698
 
14697
14699
  CONFIG:
14698
14700
  Config is loaded from (in order, later overrides earlier):
14699
14701
  1. ~/.config/q/config.toml (or $XDG_CONFIG_HOME/q/config.toml)
14700
14702
  2. ./config.toml (current directory)
14701
- 3. Environment variables (Q_PROVIDER, Q_MODEL)
14703
+ 3. Environment variables (Q_PROVIDER, Q_MODEL, Q_COPY)
14702
14704
 
14703
14705
  EXAMPLES:
14704
14706
  q how do I restart docker
@@ -14713,6 +14715,7 @@ function parseCliArgs(argv = Bun.argv.slice(2)) {
14713
14715
  provider: { type: "string", short: "p" },
14714
14716
  model: { type: "string", short: "m" },
14715
14717
  copy: { type: "boolean", default: false },
14718
+ "no-copy": { type: "boolean", default: false },
14716
14719
  debug: { type: "boolean", default: false },
14717
14720
  help: { type: "boolean", short: "h", default: false },
14718
14721
  version: { type: "boolean", short: "v", default: false }
@@ -14724,6 +14727,7 @@ function parseCliArgs(argv = Bun.argv.slice(2)) {
14724
14727
  provider: values.provider,
14725
14728
  model: values.model,
14726
14729
  copy: values.copy ?? false,
14730
+ noCopy: values["no-copy"] ?? false,
14727
14731
  debug: values.debug ?? false,
14728
14732
  help: values.help ?? false,
14729
14733
  version: values.version ?? false
@@ -28402,10 +28406,21 @@ function createEnv(opts) {
28402
28406
  }
28403
28407
 
28404
28408
  // src/env.ts
28409
+ var booleanString = exports_external.string().optional().transform((val) => {
28410
+ if (val === undefined)
28411
+ return;
28412
+ const lower = val.toLowerCase();
28413
+ if (lower === "true" || lower === "1")
28414
+ return true;
28415
+ if (lower === "false" || lower === "0")
28416
+ return false;
28417
+ throw new Error(`Invalid Q_COPY value: '${val}'. Use 'true', 'false', '1', or '0'.`);
28418
+ });
28405
28419
  var env2 = createEnv({
28406
28420
  server: {
28407
28421
  Q_PROVIDER: exports_external.string().optional(),
28408
- Q_MODEL: exports_external.string().optional()
28422
+ Q_MODEL: exports_external.string().optional(),
28423
+ Q_COPY: booleanString
28409
28424
  },
28410
28425
  runtimeEnv: process.env,
28411
28426
  emptyStringAsUndefined: true
@@ -28438,7 +28453,8 @@ var ProviderConfigSchema = exports_external.object({
28438
28453
  });
28439
28454
  var DefaultConfigSchema = exports_external.object({
28440
28455
  provider: exports_external.string(),
28441
- model: exports_external.string()
28456
+ model: exports_external.string(),
28457
+ copy: exports_external.boolean().optional()
28442
28458
  });
28443
28459
  var ConfigSchema = exports_external.object({
28444
28460
  default: DefaultConfigSchema,
@@ -28492,7 +28508,8 @@ class Config {
28492
28508
  const finalDefault = {
28493
28509
  ...mergedDefault,
28494
28510
  ...env2.Q_PROVIDER ? { provider: env2.Q_PROVIDER } : {},
28495
- ...env2.Q_MODEL ? { model: env2.Q_MODEL } : {}
28511
+ ...env2.Q_MODEL ? { model: env2.Q_MODEL } : {},
28512
+ ...env2.Q_COPY !== undefined ? { copy: env2.Q_COPY } : {}
28496
28513
  };
28497
28514
  const merged = {
28498
28515
  default: finalDefault,
@@ -28609,11 +28626,12 @@ var EXAMPLE_CONFIG = `# q configuration file
28609
28626
  # Config resolution order (later overrides earlier):
28610
28627
  # 1. This file (XDG_CONFIG_HOME/q/config.toml or ~/.config/q/config.toml)
28611
28628
  # 2. ./config.toml in current directory (project-specific)
28612
- # 3. Environment variables: Q_PROVIDER, Q_MODEL
28629
+ # 3. Environment variables: Q_PROVIDER, Q_MODEL, Q_COPY
28613
28630
 
28614
28631
  [default]
28615
28632
  provider = "anthropic"
28616
28633
  model = "claude-sonnet-4-20250514"
28634
+ # copy = true # Always copy answer to clipboard (override with --no-copy)
28617
28635
 
28618
28636
  [providers.anthropic]
28619
28637
  type = "anthropic"
@@ -35336,7 +35354,7 @@ async function* executeTool({
35336
35354
  }
35337
35355
 
35338
35356
  // node_modules/@ai-sdk/anthropic/dist/index.mjs
35339
- var VERSION3 = "3.0.46";
35357
+ var VERSION3 = "3.0.47";
35340
35358
  var anthropicErrorDataSchema = lazySchema(() => zodSchema(exports_external.object({
35341
35359
  type: exports_external.literal("error"),
35342
35360
  error: exports_external.object({
@@ -37623,6 +37641,9 @@ var AnthropicMessagesLanguageModel = class {
37623
37641
  ...(anthropicOptions == null ? undefined : anthropicOptions.speed) && {
37624
37642
  speed: anthropicOptions.speed
37625
37643
  },
37644
+ ...(anthropicOptions == null ? undefined : anthropicOptions.cacheControl) && {
37645
+ cache_control: anthropicOptions.cacheControl
37646
+ },
37626
37647
  ...useStructuredOutput && (responseFormat == null ? undefined : responseFormat.type) === "json" && responseFormat.schema != null && {
37627
37648
  output_format: {
37628
37649
  type: "json_schema",
@@ -39347,7 +39368,7 @@ function createAnthropic(options = {}) {
39347
39368
  }
39348
39369
  var anthropic = createAnthropic();
39349
39370
 
39350
- // node_modules/@ai-sdk/azure/node_modules/@ai-sdk/openai/dist/internal/index.mjs
39371
+ // node_modules/@ai-sdk/openai/dist/internal/index.mjs
39351
39372
  var openaiErrorDataSchema = exports_external.object({
39352
39373
  error: exports_external.object({
39353
39374
  message: exports_external.string(),
@@ -44615,7 +44636,7 @@ function createAzureProvider(config2, providerName) {
44615
44636
  });
44616
44637
  }
44617
44638
 
44618
- // node_modules/@ai-sdk/amazon-bedrock/node_modules/@ai-sdk/anthropic/dist/internal/index.mjs
44639
+ // node_modules/@ai-sdk/anthropic/dist/internal/index.mjs
44619
44640
  var anthropicErrorDataSchema2 = lazySchema(() => zodSchema(exports_external.object({
44620
44641
  type: exports_external.literal("error"),
44621
44642
  error: exports_external.object({
@@ -54339,7 +54360,8 @@ var modelMaxImagesPerCall3 = {
54339
54360
  "dall-e-2": 10,
54340
54361
  "gpt-image-1": 10,
54341
54362
  "gpt-image-1-mini": 10,
54342
- "gpt-image-1.5": 10
54363
+ "gpt-image-1.5": 10,
54364
+ "chatgpt-image-latest": 10
54343
54365
  };
54344
54366
  var defaultResponseFormatPrefixes2 = [
54345
54367
  "gpt-image-1-mini",
@@ -56052,7 +56074,9 @@ var openaiResponsesReasoningModelIds2 = [
56052
56074
  "gpt-5.1-codex-max",
56053
56075
  "gpt-5.2",
56054
56076
  "gpt-5.2-chat-latest",
56055
- "gpt-5.2-pro"
56077
+ "gpt-5.2-pro",
56078
+ "gpt-5.2-codex",
56079
+ "gpt-5.3-codex"
56056
56080
  ];
56057
56081
  var openaiResponsesModelIds2 = [
56058
56082
  "gpt-4.1",
@@ -58085,7 +58109,7 @@ var OpenAITranscriptionModel2 = class {
58085
58109
  };
58086
58110
  }
58087
58111
  };
58088
- var VERSION8 = "3.0.31";
58112
+ var VERSION8 = "3.0.33";
58089
58113
  function createOpenAI(options = {}) {
58090
58114
  var _a16, _b16;
58091
58115
  const baseURL = (_a16 = withoutTrailingSlash(loadOptionalSetting({
@@ -60912,7 +60936,7 @@ async function getVercelRequestId() {
60912
60936
  var _a92;
60913
60937
  return (_a92 = import_oidc.getContext().headers) == null ? undefined : _a92["x-vercel-id"];
60914
60938
  }
60915
- var VERSION10 = "3.0.53";
60939
+ var VERSION10 = "3.0.55";
60916
60940
  var AI_GATEWAY_PROTOCOL_VERSION = "0.0.1";
60917
60941
  function createGatewayProvider(options = {}) {
60918
60942
  var _a92, _b92;
@@ -60952,13 +60976,18 @@ function createGatewayProvider(options = {}) {
60952
60976
  settingValue: undefined,
60953
60977
  environmentVariableName: "VERCEL_REGION"
60954
60978
  });
60979
+ const projectId = loadOptionalSetting({
60980
+ settingValue: undefined,
60981
+ environmentVariableName: "VERCEL_PROJECT_ID"
60982
+ });
60955
60983
  return async () => {
60956
60984
  const requestId = await getVercelRequestId();
60957
60985
  return {
60958
60986
  ...deploymentId && { "ai-o11y-deployment-id": deploymentId },
60959
60987
  ...environment && { "ai-o11y-environment": environment },
60960
60988
  ...region && { "ai-o11y-region": region },
60961
- ...requestId && { "ai-o11y-request-id": requestId }
60989
+ ...requestId && { "ai-o11y-request-id": requestId },
60990
+ ...projectId && { "ai-o11y-project-id": projectId }
60962
60991
  };
60963
60992
  };
60964
60993
  };
@@ -61643,7 +61672,7 @@ function detectMediaType({
61643
61672
  }
61644
61673
  return;
61645
61674
  }
61646
- var VERSION11 = "6.0.97";
61675
+ var VERSION11 = "6.0.99";
61647
61676
  var download = async ({
61648
61677
  url: url2,
61649
61678
  maxBytes,
@@ -67393,7 +67422,8 @@ async function main() {
67393
67422
  context: context2,
67394
67423
  systemPrompt: buildSystemPrompt(envInfo)
67395
67424
  });
67396
- if (args.options.copy) {
67425
+ const shouldCopy = !args.options.noCopy && (args.options.copy || config2.default.copy);
67426
+ if (shouldCopy) {
67397
67427
  await clipboardy_default.write(result.text);
67398
67428
  logDebug2("Copied to clipboard", debug);
67399
67429
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@hongymagic/q",
3
- "version": "0.6.0",
3
+ "version": "0.7.0",
4
4
  "description": "Quick AI answers from the command line",
5
5
  "main": "dist/q.js",
6
6
  "type": "module",
@@ -47,24 +47,24 @@
47
47
  "build:windows-x64": "bun build --compile --minify --target=bun-windows-x64 ./src/cli.ts --outfile dist/q-windows-x64.exe"
48
48
  },
49
49
  "dependencies": {
50
- "@ai-sdk/amazon-bedrock": "^4.0.64",
51
- "@ai-sdk/anthropic": "^3.0.46",
52
- "@ai-sdk/azure": "^3.0.34",
53
- "@ai-sdk/google": "^3.0.31",
54
- "@ai-sdk/groq": "^3.0.24",
55
- "@ai-sdk/openai": "^3.0.31",
56
- "@ai-sdk/openai-compatible": "^2.0.30",
57
- "@t3-oss/env-core": "^0.13.10",
58
- "ai": "^6.0.97",
59
- "clipboardy": "^5.3.0",
60
- "ollama-ai-provider-v2": "^3.3.1",
61
- "zod": "^4.3.6"
50
+ "@ai-sdk/amazon-bedrock": "4.0.64",
51
+ "@ai-sdk/anthropic": "3.0.47",
52
+ "@ai-sdk/azure": "3.0.34",
53
+ "@ai-sdk/google": "3.0.31",
54
+ "@ai-sdk/groq": "3.0.24",
55
+ "@ai-sdk/openai": "3.0.33",
56
+ "@ai-sdk/openai-compatible": "2.0.30",
57
+ "@t3-oss/env-core": "0.13.10",
58
+ "ai": "6.0.99",
59
+ "clipboardy": "5.3.0",
60
+ "ollama-ai-provider-v2": "3.3.1",
61
+ "zod": "4.3.6"
62
62
  },
63
63
  "devDependencies": {
64
- "@biomejs/biome": "^2.4.4",
65
- "@types/bun": "latest",
66
- "lefthook": "^2.1.1",
67
- "typescript": "^5.9.3",
68
- "vitest": "^4.0.18"
64
+ "@biomejs/biome": "2.4.4",
65
+ "@types/bun": "1.3.9",
66
+ "lefthook": "2.1.1",
67
+ "typescript": "5.9.3",
68
+ "vitest": "4.0.18"
69
69
  }
70
70
  }