qingflow-mcp 0.3.12 → 0.3.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +10 -1
  2. package/dist/server.js +109 -41
  3. package/package.json +1 -1
package/README.md CHANGED
@@ -108,7 +108,7 @@ npm i -g git+https://github.com/853046310/qingflow-mcp.git
108
108
  Install from npm (pinned version):
109
109
 
110
110
  ```bash
111
- npm i -g qingflow-mcp@0.3.12
111
+ npm i -g qingflow-mcp@0.3.13
112
112
  ```
113
113
 
114
114
  Or one-click installer:
@@ -184,6 +184,7 @@ Deterministic read protocol (list/summary/aggregate):
184
184
  1. output profile:
185
185
  - default `output_profile=compact`: return core data only (`rows/row/groups/summary` + `next_page_token`)
186
186
  - `output_profile=verbose`: include full contract (`completeness` + `evidence` + `meta`)
187
+ - exception: `qf_query(summary)` and `qf_records_aggregate` always return `completeness`, even in `compact`, so agents can block on incomplete statistics
187
188
  2. when `output_profile=verbose`, `completeness` fields are:
188
189
  - `result_amount`
189
190
  - `returned_items`
@@ -204,9 +205,17 @@ Deterministic read protocol (list/summary/aggregate):
204
205
  - `time_range`
205
206
  - `source_pages`
206
207
  4. `strict_full=true` makes incomplete results fail fast with `NEED_MORE_DATA`.
208
+ - for `qf_query(summary)`, `strict_full` enforces raw source scan completeness; sample rows may still be capped by `max_rows`, which is reflected by `output_page_complete=false`
207
209
  5. Error payloads expose `error_code` and `fix_hint` for actionable retries.
208
210
  6. Parameter tolerance supports stringified JSON and numeric/boolean strings for key query fields.
209
211
 
212
+ For `qf_query(summary)` and `qf_records_aggregate`, read `data.summary.completeness` / `data.completeness` before concluding:
213
+
214
+ 1. `raw_scan_complete=false`: source data is not fully scanned, do not produce a final conclusion.
215
+ 2. `scan_limit_hit=true`: query stopped because scan budget was hit.
216
+ 3. `output_page_complete=false`: source may be complete, but output was truncated by `max_rows` or `max_groups`.
217
+ 4. `raw_next_page_token`: use this token to continue raw scan pagination (`next_page_token` remains as a backward-compatible alias).
218
+
210
219
  ## List Query Tips
211
220
 
212
221
  Strict mode (`qf_records_list`):
package/dist/server.js CHANGED
@@ -61,7 +61,7 @@ const ADAPTIVE_TARGET_PAGE_MS = toPositiveInt(process.env.QINGFLOW_ADAPTIVE_TARG
61
61
  const MAX_LIST_ITEMS_BYTES = toPositiveInt(process.env.QINGFLOW_LIST_MAX_ITEMS_BYTES) ?? 400000;
62
62
  const REQUEST_TIMEOUT_MS = toPositiveInt(process.env.QINGFLOW_REQUEST_TIMEOUT_MS) ?? 18000;
63
63
  const EXECUTION_BUDGET_MS = toPositiveInt(process.env.QINGFLOW_EXECUTION_BUDGET_MS) ?? 20000;
64
- const SERVER_VERSION = "0.3.12";
64
+ const SERVER_VERSION = "0.3.13";
65
65
  const accessToken = process.env.QINGFLOW_ACCESS_TOKEN;
66
66
  const baseUrl = process.env.QINGFLOW_BASE_URL;
67
67
  if (!accessToken) {
@@ -143,7 +143,15 @@ const completenessSchema = z.object({
143
143
  is_complete: z.boolean(),
144
144
  partial: z.boolean(),
145
145
  omitted_items: z.number().int().nonnegative(),
146
- omitted_chars: z.number().int().nonnegative()
146
+ omitted_chars: z.number().int().nonnegative(),
147
+ raw_scan_complete: z.boolean().optional(),
148
+ scan_limit_hit: z.boolean().optional(),
149
+ scanned_pages: z.number().int().nonnegative().optional(),
150
+ scan_limit: z.number().int().positive().optional(),
151
+ output_page_complete: z.boolean().optional(),
152
+ raw_next_page_token: z.string().nullable().optional(),
153
+ output_next_page_token: z.string().nullable().optional(),
154
+ stop_reason: z.string().nullable().optional()
147
155
  });
148
156
  const evidenceSchema = z.object({
149
157
  query_id: z.string(),
@@ -2846,6 +2854,35 @@ function applyAdaptivePaging(params) {
2846
2854
  }
2847
2855
  return { shouldStop: false };
2848
2856
  }
2857
+ function resolveScanLimit(requestedPages, scanMaxPages) {
2858
+ return Math.max(1, Math.min(requestedPages, scanMaxPages));
2859
+ }
2860
+ function buildExtendedCompleteness(params) {
2861
+ const rawScanComplete = params.rawScanComplete ?? (!params.hasMore && params.omittedItems === 0);
2862
+ const outputPageComplete = params.outputPageComplete ?? (params.omittedItems === 0 && params.omittedChars === 0);
2863
+ const isComplete = rawScanComplete && outputPageComplete;
2864
+ return {
2865
+ result_amount: params.resultAmount,
2866
+ returned_items: params.returnedItems,
2867
+ fetched_pages: params.fetchedPages,
2868
+ requested_pages: params.requestedPages,
2869
+ actual_scanned_pages: params.fetchedPages,
2870
+ has_more: params.hasMore,
2871
+ next_page_token: params.nextPageToken,
2872
+ is_complete: isComplete,
2873
+ partial: !isComplete,
2874
+ omitted_items: params.omittedItems,
2875
+ omitted_chars: params.omittedChars,
2876
+ raw_scan_complete: rawScanComplete,
2877
+ scan_limit_hit: params.scanLimitHit ?? false,
2878
+ scanned_pages: params.scannedPages ?? params.fetchedPages,
2879
+ scan_limit: params.scanLimit,
2880
+ output_page_complete: outputPageComplete,
2881
+ raw_next_page_token: params.rawNextPageToken ?? params.nextPageToken,
2882
+ output_next_page_token: params.outputNextPageToken ?? null,
2883
+ stop_reason: params.stopReason ?? null
2884
+ };
2885
+ }
2849
2886
  function normalizePlanToolName(tool) {
2850
2887
  const normalized = tool.trim().replace(/^qingflow-mcp__/, "");
2851
2888
  const allowed = new Set([
@@ -4360,6 +4397,7 @@ async function executeRecordsSummary(args) {
4360
4397
  let nextPageNum = null;
4361
4398
  let resultAmount = null;
4362
4399
  let summaryMeta = null;
4400
+ let stopReason = null;
4363
4401
  let totalAmount = 0;
4364
4402
  let missingCount = 0;
4365
4403
  const sourcePages = [];
@@ -4369,6 +4407,7 @@ async function executeRecordsSummary(args) {
4369
4407
  if (scannedPages > 0 && isExecutionBudgetExceeded(startedAt)) {
4370
4408
  hasMore = true;
4371
4409
  nextPageNum = currentPage;
4410
+ stopReason = "execution_budget";
4372
4411
  break;
4373
4412
  }
4374
4413
  const activePageSize = adaptivePaging.current_page_size;
@@ -4442,9 +4481,11 @@ async function executeRecordsSummary(args) {
4442
4481
  });
4443
4482
  if (adaptiveDecision.shouldStop && hasMore) {
4444
4483
  nextPageNum = nextPageNum ?? currentPage + 1;
4484
+ stopReason = "adaptive_budget";
4445
4485
  break;
4446
4486
  }
4447
4487
  if (!hasMore) {
4488
+ stopReason = "source_exhausted";
4448
4489
  break;
4449
4490
  }
4450
4491
  currentPage = currentPage + 1;
@@ -4491,30 +4532,41 @@ async function executeRecordsSummary(args) {
4491
4532
  throw new Error("Failed to build summary metadata");
4492
4533
  }
4493
4534
  const knownResultAmount = resultAmount ?? scannedRecords;
4494
- const omittedItems = Math.max(0, knownResultAmount - scannedRecords);
4495
- const isComplete = !hasMore && omittedItems === 0;
4496
- const nextPageToken = hasMore && nextPageNum
4535
+ const omittedSourceItems = Math.max(0, knownResultAmount - scannedRecords);
4536
+ const rawNextPageToken = hasMore && nextPageNum
4497
4537
  ? encodeContinuationToken({
4498
4538
  app_key: args.app_key,
4499
4539
  next_page_num: nextPageNum,
4500
4540
  page_size: adaptivePaging.current_page_size
4501
4541
  })
4502
4542
  : null;
4503
- const completeness = {
4504
- result_amount: knownResultAmount,
4505
- returned_items: scannedRecords,
4506
- fetched_pages: scannedPages,
4507
- requested_pages: requestedPages,
4508
- actual_scanned_pages: scannedPages,
4509
- has_more: hasMore,
4510
- next_page_token: nextPageToken,
4511
- is_complete: isComplete,
4512
- partial: !isComplete,
4513
- omitted_items: omittedItems,
4514
- omitted_chars: 0
4515
- };
4543
+ const rawScanComplete = !hasMore && omittedSourceItems === 0;
4544
+ const outputPageComplete = rows.length >= scannedRecords;
4545
+ const scanLimit = resolveScanLimit(requestedPages, scanMaxPages);
4546
+ const scanLimitHit = !rawScanComplete &&
4547
+ (scannedPages >= scanLimit ||
4548
+ stopReason === "execution_budget" ||
4549
+ stopReason === "adaptive_budget");
4550
+ const completeness = buildExtendedCompleteness({
4551
+ resultAmount: knownResultAmount,
4552
+ returnedItems: scannedRecords,
4553
+ fetchedPages: scannedPages,
4554
+ requestedPages,
4555
+ hasMore,
4556
+ nextPageToken: rawNextPageToken,
4557
+ omittedItems: omittedSourceItems,
4558
+ omittedChars: 0,
4559
+ rawScanComplete,
4560
+ scanLimitHit,
4561
+ scannedPages,
4562
+ scanLimit,
4563
+ outputPageComplete,
4564
+ rawNextPageToken,
4565
+ outputNextPageToken: null,
4566
+ stopReason
4567
+ });
4516
4568
  const evidence = buildEvidencePayload(listState, sourcePages);
4517
- if (strictFull && !isComplete) {
4569
+ if (strictFull && !rawScanComplete) {
4518
4570
  throw new NeedMoreDataError("Summary is incomplete. Continue with next_page_token or increase requested_pages/scan_max_pages.", {
4519
4571
  code: "NEED_MORE_DATA",
4520
4572
  completeness,
@@ -4530,9 +4582,9 @@ async function executeRecordsSummary(args) {
4530
4582
  missing_count: missingCount
4531
4583
  },
4532
4584
  rows,
4585
+ completeness,
4533
4586
  ...(isVerboseProfile(outputProfile)
4534
4587
  ? {
4535
- completeness,
4536
4588
  evidence,
4537
4589
  meta: {
4538
4590
  field_mapping: fieldMapping,
@@ -4554,7 +4606,7 @@ async function executeRecordsSummary(args) {
4554
4606
  execution: {
4555
4607
  scanned_records: scannedRecords,
4556
4608
  scanned_pages: scannedPages,
4557
- truncated: !isComplete,
4609
+ truncated: !completeness.is_complete,
4558
4610
  row_cap: rowCap,
4559
4611
  column_cap: args.max_columns ?? null,
4560
4612
  scan_max_pages: scanMaxPages
@@ -4564,7 +4616,7 @@ async function executeRecordsSummary(args) {
4564
4616
  : {})
4565
4617
  },
4566
4618
  meta: summaryMeta,
4567
- message: isComplete
4619
+ message: completeness.is_complete
4568
4620
  ? `Summarized ${scannedRecords} records`
4569
4621
  : `Summarized ${scannedRecords}/${knownResultAmount} records (partial)`,
4570
4622
  completeness,
@@ -4636,6 +4688,7 @@ async function executeRecordsAggregate(args) {
4636
4688
  let nextPageNum = null;
4637
4689
  let resultAmount = null;
4638
4690
  let responseMeta = null;
4691
+ let stopReason = null;
4639
4692
  let totalAmount = 0;
4640
4693
  const sourcePages = [];
4641
4694
  const groupStats = new Map();
@@ -4644,6 +4697,7 @@ async function executeRecordsAggregate(args) {
4644
4697
  if (scannedPages > 0 && isExecutionBudgetExceeded(startedAt)) {
4645
4698
  hasMore = true;
4646
4699
  nextPageNum = currentPage;
4700
+ stopReason = "execution_budget";
4647
4701
  break;
4648
4702
  }
4649
4703
  const activePageSize = adaptivePaging.current_page_size;
@@ -4723,9 +4777,11 @@ async function executeRecordsAggregate(args) {
4723
4777
  });
4724
4778
  if (adaptiveDecision.shouldStop && hasMore) {
4725
4779
  nextPageNum = nextPageNum ?? currentPage + 1;
4780
+ stopReason = "adaptive_budget";
4726
4781
  break;
4727
4782
  }
4728
4783
  if (!hasMore) {
4784
+ stopReason = "source_exhausted";
4729
4785
  break;
4730
4786
  }
4731
4787
  currentPage = currentPage + 1;
@@ -4734,30 +4790,42 @@ async function executeRecordsAggregate(args) {
4734
4790
  throw new Error("Failed to fetch aggregate pages");
4735
4791
  }
4736
4792
  const knownResultAmount = resultAmount ?? scannedRecords;
4737
- const omittedItems = Math.max(0, knownResultAmount - scannedRecords);
4738
- const isComplete = !hasMore && omittedItems === 0;
4739
- const nextPageToken = hasMore && nextPageNum
4793
+ const omittedSourceItems = Math.max(0, knownResultAmount - scannedRecords);
4794
+ const rawNextPageToken = hasMore && nextPageNum
4740
4795
  ? encodeContinuationToken({
4741
4796
  app_key: args.app_key,
4742
4797
  next_page_num: nextPageNum,
4743
4798
  page_size: adaptivePaging.current_page_size
4744
4799
  })
4745
4800
  : null;
4746
- const completeness = {
4747
- result_amount: knownResultAmount,
4748
- returned_items: scannedRecords,
4749
- fetched_pages: scannedPages,
4750
- requested_pages: requestedPages,
4751
- actual_scanned_pages: scannedPages,
4752
- has_more: hasMore,
4753
- next_page_token: nextPageToken,
4754
- is_complete: isComplete,
4755
- partial: !isComplete,
4756
- omitted_items: omittedItems,
4757
- omitted_chars: 0
4758
- };
4801
+ const groupsTotal = groupStats.size;
4802
+ const rawScanComplete = !hasMore && omittedSourceItems === 0;
4803
+ const outputPageComplete = groupsTotal <= maxGroups;
4804
+ const scanLimit = resolveScanLimit(requestedPages, scanMaxPages);
4805
+ const scanLimitHit = !rawScanComplete &&
4806
+ (scannedPages >= scanLimit ||
4807
+ stopReason === "execution_budget" ||
4808
+ stopReason === "adaptive_budget");
4809
+ const completeness = buildExtendedCompleteness({
4810
+ resultAmount: knownResultAmount,
4811
+ returnedItems: scannedRecords,
4812
+ fetchedPages: scannedPages,
4813
+ requestedPages,
4814
+ hasMore,
4815
+ nextPageToken: rawNextPageToken,
4816
+ omittedItems: omittedSourceItems,
4817
+ omittedChars: 0,
4818
+ rawScanComplete,
4819
+ scanLimitHit,
4820
+ scannedPages,
4821
+ scanLimit,
4822
+ outputPageComplete,
4823
+ rawNextPageToken,
4824
+ outputNextPageToken: null,
4825
+ stopReason
4826
+ });
4759
4827
  const evidence = buildEvidencePayload(listState, sourcePages);
4760
- if (strictFull && !isComplete) {
4828
+ if (strictFull && !completeness.is_complete) {
4761
4829
  throw new NeedMoreDataError("Aggregate result is incomplete. Continue with next_page_token or increase requested_pages/scan_max_pages.", {
4762
4830
  code: "NEED_MORE_DATA",
4763
4831
  completeness,
@@ -4835,9 +4903,9 @@ async function executeRecordsAggregate(args) {
4835
4903
  : {})
4836
4904
  },
4837
4905
  groups,
4906
+ completeness,
4838
4907
  ...(isVerboseProfile(outputProfile)
4839
4908
  ? {
4840
- completeness,
4841
4909
  evidence,
4842
4910
  meta: {
4843
4911
  field_mapping: fieldMapping,
@@ -4867,7 +4935,7 @@ async function executeRecordsAggregate(args) {
4867
4935
  }
4868
4936
  : {})
4869
4937
  },
4870
- message: isComplete
4938
+ message: completeness.is_complete
4871
4939
  ? `Aggregated ${scannedRecords} records`
4872
4940
  : `Aggregated ${scannedRecords}/${knownResultAmount} records (partial)`
4873
4941
  };
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "qingflow-mcp",
3
- "version": "0.3.12",
3
+ "version": "0.3.13",
4
4
  "private": false,
5
5
  "license": "MIT",
6
6
  "type": "module",