qingflow-mcp 0.2.7 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/README.md +57 -4
  2. package/dist/server.js +707 -62
  3. package/package.json +1 -1
package/README.md CHANGED
@@ -7,6 +7,7 @@ This MCP server wraps Qingflow OpenAPI for:
7
7
  - `qf_records_list`
8
8
  - `qf_record_get`
9
9
  - `qf_query` (unified read entry: list / record / summary)
10
+ - `qf_records_aggregate` (deterministic grouped metrics)
10
11
  - `qf_record_create`
11
12
  - `qf_record_update`
12
13
  - `qf_operation_get`
@@ -110,6 +111,10 @@ MCP client config example:
110
111
  3. `qf_record_create` or `qf_record_update`.
111
112
  4. If create/update returns only `request_id`, call `qf_operation_get` to resolve async result.
112
113
 
114
+ Full calling contract (Chinese):
115
+
116
+ - [MCP 调用规范](./docs/MCP_CALLING_SPEC.md)
117
+
113
118
  ## Unified Query (`qf_query`)
114
119
 
115
120
  `qf_query` is the recommended read entry for agents.
@@ -120,6 +125,10 @@ MCP client config example:
120
125
  - otherwise route to list query.
121
126
  2. `query_mode=list|record|summary` forces explicit behavior.
122
127
  3. In `list` mode, `time_range` is translated to list filters when `from` or `to` is provided.
128
+ 4. In `list` mode, `select_columns` is required.
129
+ 5. In `list` mode, row cap defaults to 200 when `max_rows` and `max_items` are omitted.
130
+ 6. In `record` mode, `select_columns` is required.
131
+ 7. In `summary` mode, `select_columns` is required (`max_rows` defaults to 200 when omitted).
123
132
 
124
133
  Summary mode output:
125
134
 
@@ -131,6 +140,30 @@ Return shape:
131
140
 
132
141
  1. success: structured payload `{ "ok": true, "data": ..., "meta": ... }`
133
142
  2. failure: MCP `isError=true`, and text content is JSON payload like `{ "ok": false, "message": ..., ... }`
143
+ 3. incomplete strict queries fail with `{ "code": "NEED_MORE_DATA", "status": "need_more_data", ... }`
144
+
145
+ Deterministic read protocol (list/summary/aggregate):
146
+
147
+ 1. `completeness` is always returned:
148
+ - `result_amount`
149
+ - `returned_items`
150
+ - `fetched_pages`
151
+ - `requested_pages`
152
+ - `actual_scanned_pages`
153
+ - `has_more`
154
+ - `next_page_token`
155
+ - `is_complete`
156
+ - `partial`
157
+ - `omitted_items`
158
+ - `omitted_chars`
159
+ 2. `evidence` is always returned:
160
+ - `query_id`
161
+ - `app_key`
162
+ - `filters`
163
+ - `selected_columns`
164
+ - `time_range`
165
+ - `source_pages`
166
+ 3. `strict_full=true` makes incomplete results fail fast with `NEED_MORE_DATA`.
134
167
 
135
168
  ## List Query Tips
136
169
 
@@ -142,10 +175,13 @@ Strict mode (`qf_records_list`):
142
175
 
143
176
  1. For `qf_records_list.sort[].que_id`, use a real field `que_id` (numeric) or exact field title from `qf_form_get`.
144
177
  2. Avoid aliases like `create_time`; Qingflow often rejects them.
145
- 3. Use `max_rows` (or `max_items`) to cap returned rows.
178
+ 3. Use `max_rows` (or `max_items`) to cap returned rows. Default row cap is 200.
146
179
  4. Use `max_columns` to cap answers per row when `include_answers=true`.
147
180
  5. Use `select_columns` to return only specific columns (supports `que_id` or exact field title).
148
- 6. When `include_answers=true`, the server still auto-limits by response size to protect MCP context.
181
+ 6. The server may still trim by response-size guardrail (`QINGFLOW_LIST_MAX_ITEMS_BYTES`) when payload is too large.
182
+ 7. Use `requested_pages` and `scan_max_pages` for deterministic page scan.
183
+ 8. Continue with `page_token` from previous `next_page_token`.
184
+ 9. Column limits: `select_columns <= 10`, `max_columns <= 10`.
149
185
 
150
186
  Example:
151
187
 
@@ -154,10 +190,13 @@ Example:
154
190
  "app_key": "your_app_key",
155
191
  "mode": "all",
156
192
  "page_size": 50,
193
+ "requested_pages": 1,
194
+ "scan_max_pages": 1,
157
195
  "include_answers": true,
158
196
  "max_rows": 10,
159
197
  "max_columns": 5,
160
- "select_columns": [1, "客户名称", "1003"]
198
+ "select_columns": [1, "客户名称", "1003"],
199
+ "strict_full": false
161
200
  }
162
201
  ```
163
202
 
@@ -171,10 +210,24 @@ For single record details (`qf_record_get`), the same column controls are suppor
171
210
  }
172
211
  ```
173
212
 
213
+ `qf_record_get` requires `select_columns`.
214
+
215
+ Aggregate example (`qf_records_aggregate`):
216
+
217
+ ```json
218
+ {
219
+ "app_key": "your_app_key",
220
+ "group_by": ["归属部门", "归属销售"],
221
+ "amount_column": "报价总金额",
222
+ "requested_pages": 50,
223
+ "scan_max_pages": 50,
224
+ "strict_full": true
225
+ }
226
+ ```
227
+
174
228
  Optional env vars:
175
229
 
176
230
  ```bash
177
- export QINGFLOW_LIST_MAX_ITEMS_WITH_ANSWERS=5
178
231
  export QINGFLOW_LIST_MAX_ITEMS_BYTES=400000
179
232
  ```
180
233
 
package/dist/server.js CHANGED
@@ -1,6 +1,7 @@
1
1
  #!/usr/bin/env node
2
2
  import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
3
3
  import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
4
+ import { randomUUID } from "node:crypto";
4
5
  import { z } from "zod";
5
6
  import { QingflowApiError, QingflowClient } from "./qingflow-client.js";
6
7
  const MODE_TO_TYPE = {
@@ -17,10 +18,20 @@ const MODE_TO_TYPE = {
17
18
  all_processing: 11,
18
19
  cc: 12
19
20
  };
21
+ class NeedMoreDataError extends Error {
22
+ code = "NEED_MORE_DATA";
23
+ details;
24
+ constructor(message, details) {
25
+ super(message);
26
+ this.name = "NeedMoreDataError";
27
+ this.details = details;
28
+ }
29
+ }
20
30
  const FORM_CACHE_TTL_MS = Number(process.env.QINGFLOW_FORM_CACHE_TTL_MS ?? "300000");
21
31
  const formCache = new Map();
22
32
  const DEFAULT_PAGE_SIZE = 50;
23
- const DEFAULT_MAX_ITEMS_WITH_ANSWERS = toPositiveInt(process.env.QINGFLOW_LIST_MAX_ITEMS_WITH_ANSWERS) ?? 5;
33
+ const DEFAULT_SCAN_MAX_PAGES = 50;
34
+ const DEFAULT_ROW_LIMIT = 200;
24
35
  const MAX_LIST_ITEMS_BYTES = toPositiveInt(process.env.QINGFLOW_LIST_MAX_ITEMS_BYTES) ?? 400000;
25
36
  const accessToken = process.env.QINGFLOW_ACCESS_TOKEN;
26
37
  const baseUrl = process.env.QINGFLOW_BASE_URL;
@@ -36,7 +47,7 @@ const client = new QingflowClient({
36
47
  });
37
48
  const server = new McpServer({
38
49
  name: "qingflow-mcp",
39
- version: "0.2.7"
50
+ version: "0.3.0"
40
51
  });
41
52
  const jsonPrimitiveSchema = z.union([z.string(), z.number(), z.boolean(), z.null()]);
42
53
  const answerValueSchema = z.union([
@@ -90,6 +101,34 @@ const apiMetaSchema = z.object({
90
101
  provider_err_msg: z.string().nullable(),
91
102
  base_url: z.string()
92
103
  });
104
+ const completenessSchema = z.object({
105
+ result_amount: z.number().int().nonnegative(),
106
+ returned_items: z.number().int().nonnegative(),
107
+ fetched_pages: z.number().int().nonnegative(),
108
+ requested_pages: z.number().int().positive(),
109
+ actual_scanned_pages: z.number().int().nonnegative(),
110
+ has_more: z.boolean(),
111
+ next_page_token: z.string().nullable(),
112
+ is_complete: z.boolean(),
113
+ partial: z.boolean(),
114
+ omitted_items: z.number().int().nonnegative(),
115
+ omitted_chars: z.number().int().nonnegative()
116
+ });
117
+ const evidenceSchema = z.object({
118
+ query_id: z.string(),
119
+ app_key: z.string(),
120
+ filters: z.array(z.record(z.unknown())),
121
+ selected_columns: z.array(z.string()),
122
+ time_range: z
123
+ .object({
124
+ column: z.string(),
125
+ from: z.string().nullable(),
126
+ to: z.string().nullable(),
127
+ timezone: z.string().nullable()
128
+ })
129
+ .nullable(),
130
+ source_pages: z.array(z.number().int().positive())
131
+ });
93
132
  const appSchema = z.object({
94
133
  appKey: z.string(),
95
134
  appName: z.string()
@@ -154,7 +193,10 @@ const listInputSchema = z
154
193
  app_key: z.string().min(1),
155
194
  user_id: z.string().min(1).optional(),
156
195
  page_num: z.number().int().positive().optional(),
196
+ page_token: z.string().min(1).optional(),
157
197
  page_size: z.number().int().positive().max(200).optional(),
198
+ requested_pages: z.number().int().positive().max(500).optional(),
199
+ scan_max_pages: z.number().int().positive().max(500).optional(),
158
200
  mode: z
159
201
  .enum([
160
202
  "todo",
@@ -193,15 +235,27 @@ const listInputSchema = z
193
235
  search_user_ids: z.array(z.string()).optional()
194
236
  }))
195
237
  .optional(),
238
+ time_range: z
239
+ .object({
240
+ column: z.union([z.string().min(1), z.number().int()]),
241
+ from: z.string().optional(),
242
+ to: z.string().optional(),
243
+ timezone: z.string().optional()
244
+ })
245
+ .optional(),
196
246
  max_rows: z.number().int().positive().max(200).optional(),
197
247
  max_items: z.number().int().positive().max(200).optional(),
198
- max_columns: z.number().int().positive().max(200).optional(),
248
+ max_columns: z.number().int().positive().max(10).optional(),
199
249
  // Strict mode: callers must explicitly choose columns.
200
- select_columns: z.array(z.union([z.string().min(1), z.number().int()])).min(1).max(200),
201
- include_answers: z.boolean().optional()
250
+ select_columns: z.array(z.union([z.string().min(1), z.number().int()])).min(1).max(10),
251
+ include_answers: z.boolean().optional(),
252
+ strict_full: z.boolean().optional()
202
253
  })
203
254
  .refine((value) => value.include_answers !== false, {
204
255
  message: "include_answers=false is not allowed in strict column mode"
256
+ })
257
+ .refine((value) => !(value.page_num !== undefined && value.page_token !== undefined), {
258
+ message: "page_num and page_token cannot be used together"
205
259
  });
206
260
  const listSuccessOutputSchema = z.object({
207
261
  ok: z.literal(true),
@@ -221,19 +275,20 @@ const listSuccessOutputSchema = z.object({
221
275
  column_cap: z.number().int().positive().nullable(),
222
276
  selected_columns: z.array(z.string())
223
277
  })
224
- .optional()
278
+ .optional(),
279
+ completeness: completenessSchema,
280
+ evidence: evidenceSchema
225
281
  }),
226
282
  meta: apiMetaSchema
227
283
  });
228
284
  const listOutputSchema = listSuccessOutputSchema;
229
285
  const recordGetInputSchema = z.object({
230
286
  apply_id: z.union([z.string().min(1), z.number().int()]),
231
- max_columns: z.number().int().positive().max(200).optional(),
287
+ max_columns: z.number().int().positive().max(10).optional(),
232
288
  select_columns: z
233
289
  .array(z.union([z.string().min(1), z.number().int()]))
234
290
  .min(1)
235
- .max(200)
236
- .optional()
291
+ .max(10)
237
292
  });
238
293
  const recordGetSuccessOutputSchema = z.object({
239
294
  ok: z.literal(true),
@@ -246,7 +301,13 @@ const recordGetSuccessOutputSchema = z.object({
246
301
  column_cap: z.number().int().positive().nullable(),
247
302
  selected_columns: z.array(z.string()).nullable()
248
303
  })
249
- .optional()
304
+ .optional(),
305
+ completeness: completenessSchema,
306
+ evidence: z.object({
307
+ query_id: z.string(),
308
+ apply_id: z.string(),
309
+ selected_columns: z.array(z.string())
310
+ })
250
311
  }),
251
312
  meta: apiMetaSchema
252
313
  });
@@ -316,7 +377,9 @@ const queryInputSchema = z.object({
316
377
  apply_id: z.union([z.string().min(1), z.number().int()]).optional(),
317
378
  user_id: z.string().min(1).optional(),
318
379
  page_num: z.number().int().positive().optional(),
380
+ page_token: z.string().min(1).optional(),
319
381
  page_size: z.number().int().positive().max(200).optional(),
382
+ requested_pages: z.number().int().positive().max(500).optional(),
320
383
  mode: z
321
384
  .enum([
322
385
  "todo",
@@ -357,11 +420,11 @@ const queryInputSchema = z.object({
357
420
  .optional(),
358
421
  max_rows: z.number().int().positive().max(200).optional(),
359
422
  max_items: z.number().int().positive().max(200).optional(),
360
- max_columns: z.number().int().positive().max(200).optional(),
423
+ max_columns: z.number().int().positive().max(10).optional(),
361
424
  select_columns: z
362
425
  .array(z.union([z.string().min(1), z.number().int()]))
363
426
  .min(1)
364
- .max(200)
427
+ .max(10)
365
428
  .optional(),
366
429
  include_answers: z.boolean().optional(),
367
430
  amount_column: z.union([z.string().min(1), z.number().int()]).optional(),
@@ -379,7 +442,11 @@ const queryInputSchema = z.object({
379
442
  include_null: z.boolean().optional()
380
443
  })
381
444
  .optional(),
382
- scan_max_pages: z.number().int().positive().max(500).optional()
445
+ scan_max_pages: z.number().int().positive().max(500).optional(),
446
+ strict_full: z.boolean().optional()
447
+ })
448
+ .refine((value) => !(value.page_num !== undefined && value.page_token !== undefined), {
449
+ message: "page_num and page_token cannot be used together"
383
450
  });
384
451
  const querySummaryOutputSchema = z.object({
385
452
  summary: z.object({
@@ -393,6 +460,8 @@ const querySummaryOutputSchema = z.object({
393
460
  missing_count: z.number().int().nonnegative()
394
461
  }),
395
462
  rows: z.array(z.record(z.unknown())),
463
+ completeness: completenessSchema,
464
+ evidence: evidenceSchema,
396
465
  meta: z.object({
397
466
  field_mapping: z.array(z.object({
398
467
  role: z.enum(["row", "amount", "time"]),
@@ -438,6 +507,108 @@ const querySuccessOutputSchema = z.object({
438
507
  meta: apiMetaSchema
439
508
  });
440
509
  const queryOutputSchema = querySuccessOutputSchema;
510
+ const aggregateInputSchema = z
511
+ .object({
512
+ app_key: z.string().min(1),
513
+ user_id: z.string().min(1).optional(),
514
+ page_num: z.number().int().positive().optional(),
515
+ page_token: z.string().min(1).optional(),
516
+ page_size: z.number().int().positive().max(200).optional(),
517
+ requested_pages: z.number().int().positive().max(500).optional(),
518
+ scan_max_pages: z.number().int().positive().max(500).optional(),
519
+ mode: z
520
+ .enum([
521
+ "todo",
522
+ "done",
523
+ "mine_approved",
524
+ "mine_rejected",
525
+ "mine_draft",
526
+ "mine_need_improve",
527
+ "mine_processing",
528
+ "all",
529
+ "all_approved",
530
+ "all_rejected",
531
+ "all_processing",
532
+ "cc"
533
+ ])
534
+ .optional(),
535
+ type: z.number().int().min(1).max(12).optional(),
536
+ keyword: z.string().optional(),
537
+ query_logic: z.enum(["and", "or"]).optional(),
538
+ apply_ids: z.array(z.union([z.string(), z.number()])).optional(),
539
+ sort: z
540
+ .array(z.object({
541
+ que_id: z.union([z.string().min(1), z.number().int()]),
542
+ ascend: z.boolean().optional()
543
+ }))
544
+ .optional(),
545
+ filters: z
546
+ .array(z.object({
547
+ que_id: z.union([z.string().min(1), z.number().int()]).optional(),
548
+ search_key: z.string().optional(),
549
+ search_keys: z.array(z.string()).optional(),
550
+ min_value: z.string().optional(),
551
+ max_value: z.string().optional(),
552
+ scope: z.number().int().optional(),
553
+ search_options: z.array(z.union([z.string(), z.number()])).optional(),
554
+ search_user_ids: z.array(z.string()).optional()
555
+ }))
556
+ .optional(),
557
+ time_range: z
558
+ .object({
559
+ column: z.union([z.string().min(1), z.number().int()]),
560
+ from: z.string().optional(),
561
+ to: z.string().optional(),
562
+ timezone: z.string().optional()
563
+ })
564
+ .optional(),
565
+ group_by: z.array(z.union([z.string().min(1), z.number().int()])).min(1).max(20),
566
+ amount_column: z.union([z.string().min(1), z.number().int()]).optional(),
567
+ stat_policy: z
568
+ .object({
569
+ include_negative: z.boolean().optional(),
570
+ include_null: z.boolean().optional()
571
+ })
572
+ .optional(),
573
+ max_groups: z.number().int().positive().max(2000).optional(),
574
+ strict_full: z.boolean().optional()
575
+ })
576
+ .refine((value) => !(value.page_num !== undefined && value.page_token !== undefined), {
577
+ message: "page_num and page_token cannot be used together"
578
+ });
579
+ const aggregateOutputSchema = z.object({
580
+ ok: z.literal(true),
581
+ data: z.object({
582
+ app_key: z.string(),
583
+ summary: z.object({
584
+ total_count: z.number().int().nonnegative(),
585
+ total_amount: z.number().nullable()
586
+ }),
587
+ groups: z.array(z.object({
588
+ group: z.record(z.unknown()),
589
+ count: z.number().int().nonnegative(),
590
+ count_ratio: z.number().min(0).max(1),
591
+ amount_total: z.number().nullable(),
592
+ amount_ratio: z.number().nullable()
593
+ })),
594
+ completeness: completenessSchema,
595
+ evidence: evidenceSchema,
596
+ meta: z.object({
597
+ field_mapping: z.array(z.object({
598
+ role: z.enum(["group_by", "amount", "time"]),
599
+ requested: z.string(),
600
+ que_id: z.union([z.string(), z.number()]),
601
+ que_title: z.string().nullable(),
602
+ que_type: z.unknown()
603
+ })),
604
+ stat_policy: z.object({
605
+ include_negative: z.boolean(),
606
+ include_null: z.boolean()
607
+ })
608
+ })
609
+ }),
610
+ meta: apiMetaSchema
611
+ });
441
612
  server.registerTool("qf_apps_list", {
442
613
  title: "Qingflow Apps List",
443
614
  description: "List Qingflow apps with optional filtering and client-side slicing.",
@@ -709,6 +880,24 @@ server.registerTool("qf_operation_get", {
709
880
  return errorResult(error);
710
881
  }
711
882
  });
883
+ server.registerTool("qf_records_aggregate", {
884
+ title: "Qingflow Records Aggregate",
885
+ description: "Aggregate records by group_by columns with optional amount metrics. Designed for deterministic, auditable statistics.",
886
+ inputSchema: aggregateInputSchema,
887
+ outputSchema: aggregateOutputSchema,
888
+ annotations: {
889
+ readOnlyHint: true,
890
+ idempotentHint: true
891
+ }
892
+ }, async (args) => {
893
+ try {
894
+ const executed = await executeRecordsAggregate(args);
895
+ return okResult(executed.payload, executed.message);
896
+ }
897
+ catch (error) {
898
+ return errorResult(error);
899
+ }
900
+ });
712
901
  async function main() {
713
902
  const transport = new StdioServerTransport();
714
903
  await server.connect(transport);
@@ -724,6 +913,63 @@ function buildMeta(response) {
724
913
  base_url: baseUrl
725
914
  };
726
915
  }
916
+ function resolveStartPage(pageNum, pageToken, appKey) {
917
+ if (!pageToken) {
918
+ return pageNum ?? 1;
919
+ }
920
+ const payload = decodeContinuationToken(pageToken);
921
+ if (payload.app_key !== appKey) {
922
+ throw new Error(`page_token app_key mismatch: token for ${payload.app_key}, request for ${appKey}`);
923
+ }
924
+ return payload.next_page_num;
925
+ }
926
+ function encodeContinuationToken(payload) {
927
+ return Buffer.from(JSON.stringify(payload), "utf8").toString("base64url");
928
+ }
929
+ function decodeContinuationToken(token) {
930
+ let parsed;
931
+ try {
932
+ const decoded = Buffer.from(token, "base64url").toString("utf8");
933
+ parsed = JSON.parse(decoded);
934
+ }
935
+ catch {
936
+ throw new Error("Invalid page_token");
937
+ }
938
+ const obj = asObject(parsed);
939
+ const appKey = asNullableString(obj?.app_key);
940
+ const nextPageNum = toPositiveInt(obj?.next_page_num);
941
+ const pageSize = toPositiveInt(obj?.page_size);
942
+ if (!appKey || !nextPageNum || !pageSize) {
943
+ throw new Error("Invalid page_token payload");
944
+ }
945
+ return {
946
+ app_key: appKey,
947
+ next_page_num: nextPageNum,
948
+ page_size: pageSize
949
+ };
950
+ }
951
+ function buildEvidencePayload(state, sourcePages) {
952
+ return {
953
+ query_id: state.query_id,
954
+ app_key: state.app_key,
955
+ filters: state.filters,
956
+ selected_columns: state.selected_columns,
957
+ time_range: state.time_range,
958
+ source_pages: sourcePages
959
+ };
960
+ }
961
+ function echoFilters(filters) {
962
+ return (filters ?? []).map((item) => ({
963
+ ...(item.que_id !== undefined ? { que_id: String(item.que_id) } : {}),
964
+ ...(item.search_key !== undefined ? { search_key: item.search_key } : {}),
965
+ ...(item.search_keys !== undefined ? { search_keys: item.search_keys } : {}),
966
+ ...(item.min_value !== undefined ? { min_value: item.min_value } : {}),
967
+ ...(item.max_value !== undefined ? { max_value: item.max_value } : {}),
968
+ ...(item.scope !== undefined ? { scope: item.scope } : {}),
969
+ ...(item.search_options !== undefined ? { search_options: item.search_options } : {}),
970
+ ...(item.search_user_ids !== undefined ? { search_user_ids: item.search_user_ids } : {})
971
+ }));
972
+ }
727
973
  function resolveQueryMode(args) {
728
974
  const requested = args.query_mode ?? "auto";
729
975
  if (requested !== "auto") {
@@ -752,7 +998,10 @@ function buildListArgsFromQuery(args) {
752
998
  app_key: args.app_key,
753
999
  user_id: args.user_id,
754
1000
  page_num: args.page_num,
1001
+ page_token: args.page_token,
755
1002
  page_size: args.page_size,
1003
+ requested_pages: args.requested_pages,
1004
+ scan_max_pages: args.scan_max_pages,
756
1005
  mode: args.mode,
757
1006
  type: args.type,
758
1007
  keyword: args.keyword,
@@ -760,16 +1009,20 @@ function buildListArgsFromQuery(args) {
760
1009
  apply_ids: args.apply_ids,
761
1010
  sort: args.sort,
762
1011
  filters,
1012
+ time_range: args.time_range,
763
1013
  max_rows: args.max_rows,
764
1014
  max_items: args.max_items,
765
1015
  max_columns: args.max_columns,
766
1016
  select_columns: args.select_columns,
767
- include_answers: args.include_answers
1017
+ include_answers: args.include_answers,
1018
+ strict_full: args.strict_full
768
1019
  });
769
1020
  }
770
1021
  function buildListFiltersFromQuery(args) {
771
- const filters = [...(args.filters ?? [])];
772
- const timeRange = args.time_range;
1022
+ return appendTimeRangeFilter(args.filters, args.time_range);
1023
+ }
1024
+ function appendTimeRangeFilter(inputFilters, timeRange) {
1025
+ const filters = [...(inputFilters ?? [])];
773
1026
  if (!timeRange) {
774
1027
  return filters.length > 0 ? filters : undefined;
775
1028
  }
@@ -796,6 +1049,9 @@ function buildRecordGetArgsFromQuery(args) {
796
1049
  if (args.apply_id === undefined) {
797
1050
  throw new Error("apply_id is required for record query");
798
1051
  }
1052
+ if (!args.select_columns?.length) {
1053
+ throw new Error("select_columns is required for record query");
1054
+ }
799
1055
  return recordGetInputSchema.parse({
800
1056
  apply_id: args.apply_id,
801
1057
  max_columns: args.max_columns,
@@ -803,31 +1059,61 @@ function buildRecordGetArgsFromQuery(args) {
803
1059
  });
804
1060
  }
805
1061
  async function executeRecordsList(args) {
806
- const pageNum = args.page_num ?? 1;
1062
+ const queryId = randomUUID();
1063
+ const pageNum = resolveStartPage(args.page_num, args.page_token, args.app_key);
807
1064
  const pageSize = args.page_size ?? DEFAULT_PAGE_SIZE;
1065
+ const requestedPages = args.requested_pages ?? 1;
1066
+ const scanMaxPages = args.scan_max_pages ?? requestedPages;
1067
+ const effectiveFilters = appendTimeRangeFilter(args.filters, args.time_range);
808
1068
  const normalizedSort = await normalizeListSort(args.sort, args.app_key, args.user_id);
809
1069
  const includeAnswers = true;
810
- const payload = buildListPayload({
811
- pageNum,
812
- pageSize,
813
- mode: args.mode,
814
- type: args.type,
815
- keyword: args.keyword,
816
- queryLogic: args.query_logic,
817
- applyIds: args.apply_ids,
818
- sort: normalizedSort,
819
- filters: args.filters
820
- });
821
- const response = await client.listRecords(args.app_key, payload, { userId: args.user_id });
822
- const result = asObject(response.result);
823
- const rawItems = asArray(result?.result);
1070
+ let currentPage = pageNum;
1071
+ let fetchedPages = 0;
1072
+ let hasMore = false;
1073
+ let nextPageNum = null;
1074
+ let resultAmount = null;
1075
+ let pageAmount = null;
1076
+ let responseMeta = null;
1077
+ const sourcePages = [];
1078
+ const collectedRawItems = [];
1079
+ while (fetchedPages < requestedPages && fetchedPages < scanMaxPages) {
1080
+ const payload = buildListPayload({
1081
+ pageNum: currentPage,
1082
+ pageSize,
1083
+ mode: args.mode,
1084
+ type: args.type,
1085
+ keyword: args.keyword,
1086
+ queryLogic: args.query_logic,
1087
+ applyIds: args.apply_ids,
1088
+ sort: normalizedSort,
1089
+ filters: effectiveFilters
1090
+ });
1091
+ const response = await client.listRecords(args.app_key, payload, { userId: args.user_id });
1092
+ responseMeta = responseMeta ?? buildMeta(response);
1093
+ const result = asObject(response.result);
1094
+ const rawItems = asArray(result?.result);
1095
+ collectedRawItems.push(...rawItems);
1096
+ sourcePages.push(currentPage);
1097
+ fetchedPages += 1;
1098
+ resultAmount = resultAmount ?? toNonNegativeInt(result?.resultAmount);
1099
+ pageAmount = pageAmount ?? toPositiveInt(result?.pageAmount);
1100
+ hasMore = pageAmount !== null ? currentPage < pageAmount : rawItems.length === pageSize;
1101
+ nextPageNum = hasMore ? currentPage + 1 : null;
1102
+ if (!hasMore) {
1103
+ break;
1104
+ }
1105
+ currentPage = currentPage + 1;
1106
+ }
1107
+ if (!responseMeta) {
1108
+ throw new Error("Failed to fetch list pages");
1109
+ }
1110
+ const knownResultAmount = resultAmount ?? collectedRawItems.length;
824
1111
  const listLimit = resolveListItemLimit({
825
- total: rawItems.length,
1112
+ total: collectedRawItems.length,
826
1113
  requestedMaxRows: args.max_rows,
827
- requestedMaxItems: args.max_items,
828
- includeAnswers
1114
+ requestedMaxItems: args.max_items
829
1115
  });
830
- const items = rawItems
1116
+ const items = collectedRawItems
831
1117
  .slice(0, listLimit.limit)
832
1118
  .map((raw) => normalizeRecordItem(raw, includeAnswers));
833
1119
  const columnProjection = projectRecordItemsColumns({
@@ -846,15 +1132,61 @@ async function executeRecordsList(args) {
846
1132
  limitBytes: MAX_LIST_ITEMS_BYTES
847
1133
  });
848
1134
  const truncationReason = mergeTruncationReasons(listLimit.reason, columnProjection.reason, fitted.reason);
1135
+ const omittedItems = Math.max(0, knownResultAmount - fitted.items.length);
1136
+ const isComplete = !hasMore &&
1137
+ omittedItems === 0 &&
1138
+ fitted.omittedItems === 0 &&
1139
+ fitted.omittedChars === 0;
1140
+ const nextPageToken = hasMore && nextPageNum
1141
+ ? encodeContinuationToken({
1142
+ app_key: args.app_key,
1143
+ next_page_num: nextPageNum,
1144
+ page_size: pageSize
1145
+ })
1146
+ : null;
1147
+ const completeness = {
1148
+ result_amount: knownResultAmount,
1149
+ returned_items: fitted.items.length,
1150
+ fetched_pages: fetchedPages,
1151
+ requested_pages: requestedPages,
1152
+ actual_scanned_pages: fetchedPages,
1153
+ has_more: hasMore,
1154
+ next_page_token: nextPageToken,
1155
+ is_complete: isComplete,
1156
+ partial: !isComplete,
1157
+ omitted_items: omittedItems,
1158
+ omitted_chars: fitted.omittedChars
1159
+ };
1160
+ const listState = {
1161
+ query_id: queryId,
1162
+ app_key: args.app_key,
1163
+ selected_columns: columnProjection.selectedColumns,
1164
+ filters: echoFilters(effectiveFilters),
1165
+ time_range: args.time_range
1166
+ ? {
1167
+ column: String(args.time_range.column),
1168
+ from: args.time_range.from ?? null,
1169
+ to: args.time_range.to ?? null,
1170
+ timezone: args.time_range.timezone ?? null
1171
+ }
1172
+ : null
1173
+ };
1174
+ if (args.strict_full && !isComplete) {
1175
+ throw new NeedMoreDataError("List result is incomplete. Increase requested_pages/max_rows or continue with next_page_token.", {
1176
+ code: "NEED_MORE_DATA",
1177
+ completeness,
1178
+ evidence: buildEvidencePayload(listState, sourcePages)
1179
+ });
1180
+ }
849
1181
  const responsePayload = {
850
1182
  ok: true,
851
1183
  data: {
852
1184
  app_key: args.app_key,
853
1185
  pagination: {
854
- page_num: toPositiveInt(result?.pageNum) ?? pageNum,
855
- page_size: toPositiveInt(result?.pageSize) ?? pageSize,
856
- page_amount: toNonNegativeInt(result?.pageAmount),
857
- result_amount: toNonNegativeInt(result?.resultAmount) ?? fitted.items.length
1186
+ page_num: pageNum,
1187
+ page_size: pageSize,
1188
+ page_amount: pageAmount,
1189
+ result_amount: knownResultAmount
858
1190
  },
859
1191
  items: fitted.items,
860
1192
  applied_limits: {
@@ -862,20 +1194,23 @@ async function executeRecordsList(args) {
862
1194
  row_cap: listLimit.limit,
863
1195
  column_cap: args.max_columns ?? null,
864
1196
  selected_columns: columnProjection.selectedColumns
865
- }
1197
+ },
1198
+ completeness,
1199
+ evidence: buildEvidencePayload(listState, sourcePages)
866
1200
  },
867
- meta: buildMeta(response)
1201
+ meta: responseMeta
868
1202
  };
869
1203
  return {
870
1204
  payload: responsePayload,
871
1205
  message: buildRecordsListMessage({
872
1206
  returned: fitted.items.length,
873
- total: rawItems.length,
1207
+ total: knownResultAmount,
874
1208
  truncationReason
875
1209
  })
876
1210
  };
877
1211
  }
878
1212
  async function executeRecordGet(args) {
1213
+ const queryId = randomUUID();
879
1214
  const response = await client.getRecord(String(args.apply_id));
880
1215
  const record = asObject(response.result) ?? {};
881
1216
  const projection = projectAnswersForOutput({
@@ -898,6 +1233,24 @@ async function executeRecordGet(args) {
898
1233
  applied_limits: {
899
1234
  column_cap: args.max_columns ?? null,
900
1235
  selected_columns: projection.selectedColumns
1236
+ },
1237
+ completeness: {
1238
+ result_amount: 1,
1239
+ returned_items: 1,
1240
+ fetched_pages: 1,
1241
+ requested_pages: 1,
1242
+ actual_scanned_pages: 1,
1243
+ has_more: false,
1244
+ next_page_token: null,
1245
+ is_complete: true,
1246
+ partial: false,
1247
+ omitted_items: 0,
1248
+ omitted_chars: 0
1249
+ },
1250
+ evidence: {
1251
+ query_id: queryId,
1252
+ apply_id: String(args.apply_id),
1253
+ selected_columns: projection.selectedColumns ?? []
901
1254
  }
902
1255
  },
903
1256
  meta: buildMeta(response)
@@ -912,11 +1265,15 @@ async function executeRecordsSummary(args) {
912
1265
  if (!args.select_columns?.length) {
913
1266
  throw new Error("select_columns is required for summary query");
914
1267
  }
1268
+ const queryId = randomUUID();
1269
+ const strictFull = args.strict_full ?? true;
915
1270
  const includeNegative = args.stat_policy?.include_negative ?? true;
916
1271
  const includeNull = args.stat_policy?.include_null ?? false;
917
- const scanMaxPages = args.scan_max_pages ?? 50;
1272
+ const scanMaxPages = args.scan_max_pages ?? DEFAULT_SCAN_MAX_PAGES;
1273
+ const requestedPages = args.requested_pages ?? scanMaxPages;
1274
+ const startPage = resolveStartPage(args.page_num, args.page_token, args.app_key);
918
1275
  const pageSize = args.page_size ?? DEFAULT_PAGE_SIZE;
919
- const rowCap = Math.min(args.max_rows ?? DEFAULT_PAGE_SIZE, 200);
1276
+ const rowCap = Math.min(args.max_rows ?? DEFAULT_ROW_LIMIT, DEFAULT_ROW_LIMIT);
920
1277
  const timezone = args.time_range?.timezone ?? "Asia/Shanghai";
921
1278
  const form = await getFormCached(args.app_key, args.user_id, false);
922
1279
  const index = buildFieldIndex(form.result);
@@ -938,16 +1295,33 @@ async function executeRecordsSummary(args) {
938
1295
  ...(args.time_range.to ? { max_value: args.time_range.to } : {})
939
1296
  });
940
1297
  }
941
- let currentPage = args.page_num ?? 1;
1298
+ const listState = {
1299
+ query_id: queryId,
1300
+ app_key: args.app_key,
1301
+ selected_columns: effectiveColumns.map((item) => item.requested),
1302
+ filters: echoFilters(summaryFilters),
1303
+ time_range: timeColumn
1304
+ ? {
1305
+ column: timeColumn.requested,
1306
+ from: args.time_range?.from ?? null,
1307
+ to: args.time_range?.to ?? null,
1308
+ timezone
1309
+ }
1310
+ : null
1311
+ };
1312
+ let currentPage = startPage;
942
1313
  let scannedPages = 0;
943
1314
  let scannedRecords = 0;
944
- let truncated = false;
1315
+ let hasMore = false;
1316
+ let nextPageNum = null;
1317
+ let resultAmount = null;
945
1318
  let summaryMeta = null;
946
1319
  let totalAmount = 0;
947
1320
  let missingCount = 0;
1321
+ const sourcePages = [];
948
1322
  const rows = [];
949
1323
  const byDay = new Map();
950
- while (true) {
1324
+ while (scannedPages < requestedPages && scannedPages < scanMaxPages) {
951
1325
  const payload = buildListPayload({
952
1326
  pageNum: currentPage,
953
1327
  pageSize,
@@ -962,10 +1336,13 @@ async function executeRecordsSummary(args) {
962
1336
  const response = await client.listRecords(args.app_key, payload, { userId: args.user_id });
963
1337
  summaryMeta = summaryMeta ?? buildMeta(response);
964
1338
  scannedPages += 1;
1339
+ sourcePages.push(currentPage);
965
1340
  const result = asObject(response.result);
966
1341
  const rawItems = asArray(result?.result);
967
1342
  const pageAmount = toPositiveInt(result?.pageAmount);
968
- const hasMoreByAmount = pageAmount !== null ? currentPage < pageAmount : rawItems.length === pageSize;
1343
+ resultAmount = resultAmount ?? toNonNegativeInt(result?.resultAmount);
1344
+ hasMore = pageAmount !== null ? currentPage < pageAmount : rawItems.length === pageSize;
1345
+ nextPageNum = hasMore ? currentPage + 1 : null;
969
1346
  for (const rawItem of rawItems) {
970
1347
  const record = asObject(rawItem) ?? {};
971
1348
  const answers = asArray(record.answers);
@@ -1004,14 +1381,10 @@ async function executeRecordsSummary(args) {
1004
1381
  }
1005
1382
  byDay.set(dayKey, bucket);
1006
1383
  }
1007
- if (!hasMoreByAmount) {
1008
- break;
1009
- }
1010
- if (scannedPages >= scanMaxPages) {
1011
- truncated = true;
1384
+ if (!hasMore) {
1012
1385
  break;
1013
1386
  }
1014
- currentPage += 1;
1387
+ currentPage = currentPage + 1;
1015
1388
  }
1016
1389
  const byDayStats = Array.from(byDay.entries())
1017
1390
  .sort((a, b) => a[0].localeCompare(b[0]))
@@ -1054,6 +1427,37 @@ async function executeRecordsSummary(args) {
1054
1427
  if (!summaryMeta) {
1055
1428
  throw new Error("Failed to build summary metadata");
1056
1429
  }
1430
+ const knownResultAmount = resultAmount ?? scannedRecords;
1431
+ const omittedItems = Math.max(0, knownResultAmount - scannedRecords);
1432
+ const isComplete = !hasMore && omittedItems === 0;
1433
+ const nextPageToken = hasMore && nextPageNum
1434
+ ? encodeContinuationToken({
1435
+ app_key: args.app_key,
1436
+ next_page_num: nextPageNum,
1437
+ page_size: pageSize
1438
+ })
1439
+ : null;
1440
+ const completeness = {
1441
+ result_amount: knownResultAmount,
1442
+ returned_items: scannedRecords,
1443
+ fetched_pages: scannedPages,
1444
+ requested_pages: requestedPages,
1445
+ actual_scanned_pages: scannedPages,
1446
+ has_more: hasMore,
1447
+ next_page_token: nextPageToken,
1448
+ is_complete: isComplete,
1449
+ partial: !isComplete,
1450
+ omitted_items: omittedItems,
1451
+ omitted_chars: 0
1452
+ };
1453
+ const evidence = buildEvidencePayload(listState, sourcePages);
1454
+ if (strictFull && !isComplete) {
1455
+ throw new NeedMoreDataError("Summary is incomplete. Continue with next_page_token or increase requested_pages/scan_max_pages.", {
1456
+ code: "NEED_MORE_DATA",
1457
+ completeness,
1458
+ evidence
1459
+ });
1460
+ }
1057
1461
  return {
1058
1462
  data: {
1059
1463
  summary: {
@@ -1063,6 +1467,8 @@ async function executeRecordsSummary(args) {
1063
1467
  missing_count: missingCount
1064
1468
  },
1065
1469
  rows,
1470
+ completeness,
1471
+ evidence,
1066
1472
  meta: {
1067
1473
  field_mapping: fieldMapping,
1068
1474
  filters: {
@@ -1083,7 +1489,7 @@ async function executeRecordsSummary(args) {
1083
1489
  execution: {
1084
1490
  scanned_records: scannedRecords,
1085
1491
  scanned_pages: scannedPages,
1086
- truncated,
1492
+ truncated: !isComplete,
1087
1493
  row_cap: rowCap,
1088
1494
  column_cap: args.max_columns ?? null,
1089
1495
  scan_max_pages: scanMaxPages
@@ -1091,7 +1497,219 @@ async function executeRecordsSummary(args) {
1091
1497
  }
1092
1498
  },
1093
1499
  meta: summaryMeta,
1094
- message: `Summarized ${scannedRecords} records`
1500
+ message: isComplete
1501
+ ? `Summarized ${scannedRecords} records`
1502
+ : `Summarized ${scannedRecords}/${knownResultAmount} records (partial)`
1503
+ };
1504
+ }
1505
/**
 * Executes the `qf_records_aggregate` tool: scans matching record pages and
 * builds deterministic grouped metrics — a per-group `count`, and per-group
 * amount totals when `amount_column` is supplied.
 *
 * Pagination / completeness contract:
 * - Scans at most min(requested_pages, scan_max_pages) pages, starting from
 *   resolveStartPage(page_num, page_token, app_key).
 * - Emits a `completeness` payload (result_amount, has_more, next_page_token,
 *   omitted_items, ...) and an `evidence` payload derived from the echoed
 *   list state plus the pages actually fetched.
 * - With strict_full (default true) an incomplete scan throws
 *   NeedMoreDataError (code "NEED_MORE_DATA") carrying completeness/evidence.
 *
 * @param {object} args - Tool arguments: app_key, user_id, group_by,
 *   amount_column?, time_range?, filters?, sort?, keyword?, mode?, type?,
 *   query_logic?, apply_ids?, paging knobs (page_num/page_token/page_size/
 *   scan_max_pages/requested_pages/max_groups), stat_policy?, strict_full?.
 * @returns {Promise<object>} `{ payload: { ok, data, meta }, message }`.
 * @throws {NeedMoreDataError} when strict_full and the result is partial.
 * @throws {Error} when no page could be fetched (no response metadata).
 */
async function executeRecordsAggregate(args) {
    const queryId = randomUUID();
    // strict_full defaults to ON: partial scans become errors, not silent truncation.
    const strictFull = args.strict_full ?? true;
    const includeNegative = args.stat_policy?.include_negative ?? true;
    const includeNull = args.stat_policy?.include_null ?? false;
    const pageSize = args.page_size ?? DEFAULT_PAGE_SIZE;
    const scanMaxPages = args.scan_max_pages ?? DEFAULT_SCAN_MAX_PAGES;
    // requested_pages defaults to the hard cap so the loop condition below
    // degenerates to "scannedPages < scanMaxPages" when it is omitted.
    const requestedPages = args.requested_pages ?? scanMaxPages;
    const startPage = resolveStartPage(args.page_num, args.page_token, args.app_key);
    const maxGroups = args.max_groups ?? 200;
    const timezone = args.time_range?.timezone ?? "Asia/Shanghai";
    // Resolve the form schema (cached) so requested column names map to que_ids.
    const form = await getFormCached(args.app_key, args.user_id, false);
    const index = buildFieldIndex(form.result);
    const groupColumns = resolveSummaryColumns(args.group_by, index, "group_by");
    const amountColumn = args.amount_column !== undefined
        ? resolveSummaryColumn(args.amount_column, index, "amount_column")
        : null;
    const timeColumn = args.time_range ? resolveSummaryColumn(args.time_range.column, index, "time_range.column") : null;
    const normalizedSort = await normalizeListSort(args.sort, args.app_key, args.user_id);
    // Copy caller filters; the time range (when bounded) is appended as an
    // extra min/max filter on the resolved time column.
    const aggregateFilters = [...(args.filters ?? [])];
    // NOTE(review): truthiness check — an empty-string from/to is treated as
    // absent. Confirm empty strings are never valid bounds upstream.
    if (timeColumn && (args.time_range?.from || args.time_range?.to)) {
        aggregateFilters.push({
            que_id: timeColumn.que_id,
            ...(args.time_range.from ? { min_value: args.time_range.from } : {}),
            ...(args.time_range.to ? { max_value: args.time_range.to } : {})
        });
    }
    // Echo of the effective query, later folded into the evidence payload.
    const listState = {
        query_id: queryId,
        app_key: args.app_key,
        selected_columns: groupColumns.map((item) => item.requested),
        filters: echoFilters(aggregateFilters),
        time_range: timeColumn
            ? {
                column: timeColumn.requested,
                from: args.time_range?.from ?? null,
                to: args.time_range?.to ?? null,
                timezone
            }
            : null
    };
    let currentPage = startPage;
    let scannedPages = 0;
    let scannedRecords = 0;
    let hasMore = false;
    let nextPageNum = null;
    let resultAmount = null; // total match count as reported by the first page
    let responseMeta = null; // metadata from the first successful response
    let totalAmount = 0; // grand total across all counted amounts
    const sourcePages = []; // page numbers actually fetched (for evidence)
    const groupStats = new Map(); // stableJson(group) -> { group, count, amount }
    while (scannedPages < requestedPages && scannedPages < scanMaxPages) {
        const payload = buildListPayload({
            pageNum: currentPage,
            pageSize,
            mode: args.mode,
            type: args.type,
            keyword: args.keyword,
            queryLogic: args.query_logic,
            applyIds: args.apply_ids,
            sort: normalizedSort,
            filters: aggregateFilters
        });
        const response = await client.listRecords(args.app_key, payload, { userId: args.user_id });
        responseMeta = responseMeta ?? buildMeta(response);
        scannedPages += 1;
        sourcePages.push(currentPage);
        const result = asObject(response.result);
        const rawItems = asArray(result?.result);
        const pageAmount = toPositiveInt(result?.pageAmount);
        resultAmount = resultAmount ?? toNonNegativeInt(result?.resultAmount);
        // Prefer the server-reported page count; otherwise infer "more pages"
        // from a completely full page (may over-report by one empty page).
        hasMore = pageAmount !== null ? currentPage < pageAmount : rawItems.length === pageSize;
        nextPageNum = hasMore ? currentPage + 1 : null;
        for (const rawItem of rawItems) {
            const record = asObject(rawItem) ?? {};
            const answers = asArray(record.answers);
            scannedRecords += 1;
            // Build the group key from the requested group_by columns.
            const group = {};
            for (const column of groupColumns) {
                group[column.requested] = extractSummaryColumnValue(answers, column);
            }
            // stableJson sorts object keys, so equal groups always collide.
            const groupKey = stableJson(group);
            const bucket = groupStats.get(groupKey) ?? { group, count: 0, amount: 0 };
            bucket.count += 1;
            if (amountColumn) {
                const amountValue = extractSummaryColumnValue(answers, amountColumn);
                const numericAmount = toFiniteAmount(amountValue);
                if (numericAmount === null) {
                    // NOTE(review): this branch is a no-op either way — a null
                    // amount never contributes to totals regardless of
                    // include_null. Confirm whether include_null was meant to
                    // affect anything beyond keeping the group count.
                    if (includeNull) {
                        // Keep group count while amount contributes 0.
                    }
                }
                else if (includeNegative || numericAmount >= 0) {
                    bucket.amount += numericAmount;
                    totalAmount += numericAmount;
                }
            }
            groupStats.set(groupKey, bucket);
        }
        if (!hasMore) {
            break;
        }
        currentPage = currentPage + 1;
    }
    // Loop bounds guarantee at least one iteration only when requestedPages
    // and scanMaxPages are positive; otherwise no page was fetched.
    if (!responseMeta) {
        throw new Error("Failed to fetch aggregate pages");
    }
    // Fall back to what we actually saw when the server gave no resultAmount.
    const knownResultAmount = resultAmount ?? scannedRecords;
    const omittedItems = Math.max(0, knownResultAmount - scannedRecords);
    const isComplete = !hasMore && omittedItems === 0;
    const nextPageToken = hasMore && nextPageNum
        ? encodeContinuationToken({
            app_key: args.app_key,
            next_page_num: nextPageNum,
            page_size: pageSize
        })
        : null;
    const completeness = {
        result_amount: knownResultAmount,
        returned_items: scannedRecords,
        fetched_pages: scannedPages,
        requested_pages: requestedPages,
        actual_scanned_pages: scannedPages,
        has_more: hasMore,
        next_page_token: nextPageToken,
        is_complete: isComplete,
        partial: !isComplete,
        omitted_items: omittedItems,
        omitted_chars: 0
    };
    const evidence = buildEvidencePayload(listState, sourcePages);
    if (strictFull && !isComplete) {
        throw new NeedMoreDataError("Aggregate result is incomplete. Continue with next_page_token or increase requested_pages/scan_max_pages.", {
            code: "NEED_MORE_DATA",
            completeness,
            evidence
        });
    }
    // Top groups by count (descending), capped at max_groups. Ratios are
    // relative to the scanned records / counted amount total; amount fields
    // are null when no amount_column was requested.
    const groups = Array.from(groupStats.values())
        .sort((a, b) => b.count - a.count)
        .slice(0, maxGroups)
        .map((bucket) => ({
        group: bucket.group,
        count: bucket.count,
        count_ratio: scannedRecords > 0 ? bucket.count / scannedRecords : 0,
        amount_total: amountColumn ? bucket.amount : null,
        amount_ratio: amountColumn && totalAmount !== 0
            ? bucket.amount / totalAmount
            : amountColumn
                ? 0
                : null
    }));
    // Echo how each requested column resolved against the form schema.
    const fieldMapping = [
        ...groupColumns.map((item) => ({
            role: "group_by",
            requested: item.requested,
            que_id: item.que_id,
            que_title: item.que_title,
            que_type: item.que_type
        })),
        ...(amountColumn
            ? [
                {
                    role: "amount",
                    requested: amountColumn.requested,
                    que_id: amountColumn.que_id,
                    que_title: amountColumn.que_title,
                    que_type: amountColumn.que_type
                }
            ]
            : []),
        ...(timeColumn
            ? [
                {
                    role: "time",
                    requested: timeColumn.requested,
                    que_id: timeColumn.que_id,
                    que_title: timeColumn.que_title,
                    que_type: timeColumn.que_type
                }
            ]
            : [])
    ];
    return {
        payload: {
            ok: true,
            data: {
                app_key: args.app_key,
                summary: {
                    total_count: scannedRecords,
                    total_amount: amountColumn ? totalAmount : null
                },
                groups,
                completeness,
                evidence,
                meta: {
                    field_mapping: fieldMapping,
                    stat_policy: {
                        include_negative: includeNegative,
                        include_null: includeNull
                    }
                }
            },
            meta: responseMeta
        },
        message: isComplete
            ? `Aggregated ${scannedRecords} records`
            : `Aggregated ${scannedRecords}/${knownResultAmount} records (partial)`
    };
}
1097
1715
  function resolveSummaryColumns(columns, index, label) {
@@ -1539,10 +2157,10 @@ function resolveListItemLimit(params) {
1539
2157
  }
1540
2158
  return { limit, reason: null };
1541
2159
  }
1542
- if (params.includeAnswers && params.total > DEFAULT_MAX_ITEMS_WITH_ANSWERS) {
2160
+ if (params.total > DEFAULT_ROW_LIMIT) {
1543
2161
  return {
1544
- limit: DEFAULT_MAX_ITEMS_WITH_ANSWERS,
1545
- reason: `auto-limited to ${DEFAULT_MAX_ITEMS_WITH_ANSWERS} items because include_answers=true`
2162
+ limit: DEFAULT_ROW_LIMIT,
2163
+ reason: `default-limited to ${DEFAULT_ROW_LIMIT} items`
1546
2164
  };
1547
2165
  }
1548
2166
  return { limit: params.total, reason: null };
@@ -1606,17 +2224,21 @@ function projectAnswersForOutput(params) {
1606
2224
  }
1607
2225
  function fitListItemsWithinSize(params) {
1608
2226
  let candidate = params.items;
1609
- let size = jsonSizeBytes(candidate);
2227
+ const originalSize = jsonSizeBytes(candidate);
2228
+ let size = originalSize;
1610
2229
  if (size <= params.limitBytes) {
1611
- return { items: candidate, reason: null };
2230
+ return { items: candidate, reason: null, omittedItems: 0, omittedChars: 0 };
1612
2231
  }
2232
+ const originalCount = candidate.length;
1613
2233
  while (candidate.length > 1) {
1614
2234
  candidate = candidate.slice(0, candidate.length - 1);
1615
2235
  size = jsonSizeBytes(candidate);
1616
2236
  if (size <= params.limitBytes) {
1617
2237
  return {
1618
2238
  items: candidate,
1619
- reason: `auto-limited to ${candidate.length} items to keep response <= ${params.limitBytes} bytes`
2239
+ reason: `auto-limited to ${candidate.length} items to keep response <= ${params.limitBytes} bytes`,
2240
+ omittedItems: Math.max(0, originalCount - candidate.length),
2241
+ omittedChars: Math.max(0, originalSize - size)
1620
2242
  };
1621
2243
  }
1622
2244
  }
@@ -1683,6 +2305,20 @@ function normalizeQueId(queId) {
1683
2305
  }
1684
2306
  throw new Error(`Resolved que_id has unsupported type: ${typeof queId}`);
1685
2307
  }
2308
// Deterministic JSON-like serialization: object keys are emitted in sorted
// order, so structurally equal values always yield the same string. Used to
// build stable Map keys for grouping. null and undefined both serialize to
// the literal text "null".
function stableJson(value) {
    if (value == null) {
        return "null";
    }
    if (typeof value !== "object") {
        return JSON.stringify(value);
    }
    if (Array.isArray(value)) {
        const parts = [];
        for (const element of value) {
            parts.push(stableJson(element));
        }
        return `[${parts.join(",")}]`;
    }
    const entries = Object.keys(value)
        .sort()
        .map((name) => `${JSON.stringify(name)}:${stableJson(value[name])}`);
    return `{${entries.join(",")}}`;
}
1686
2322
// Returns the UTF-8 byte length of a value's JSON serialization.
function jsonSizeBytes(value) {
    const serialized = JSON.stringify(value);
    return Buffer.byteLength(serialized, "utf8");
}
@@ -1714,6 +2350,15 @@ function errorResult(error) {
1714
2350
  };
1715
2351
  }
1716
2352
  function toErrorPayload(error) {
2353
+ if (error instanceof NeedMoreDataError) {
2354
+ return {
2355
+ ok: false,
2356
+ code: error.code,
2357
+ status: "need_more_data",
2358
+ message: error.message,
2359
+ details: error.details
2360
+ };
2361
+ }
1717
2362
  if (error instanceof QingflowApiError) {
1718
2363
  return {
1719
2364
  ok: false,
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "qingflow-mcp",
3
- "version": "0.2.7",
3
+ "version": "0.3.0",
4
4
  "private": false,
5
5
  "license": "MIT",
6
6
  "type": "module",