@josephyan/qingflow-app-user-mcp 0.2.0-beta.97 → 0.2.0-beta.981

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -3,13 +3,13 @@
3
3
  Install:
4
4
 
5
5
  ```bash
6
- npm install @josephyan/qingflow-app-user-mcp@0.2.0-beta.97
6
+ npm install @josephyan/qingflow-app-user-mcp@0.2.0-beta.981
7
7
  ```
8
8
 
9
9
  Run:
10
10
 
11
11
  ```bash
12
- npx -y -p @josephyan/qingflow-app-user-mcp@0.2.0-beta.97 qingflow-app-user-mcp
12
+ npx -y -p @josephyan/qingflow-app-user-mcp@0.2.0-beta.981 qingflow-app-user-mcp
13
13
  ```
14
14
 
15
15
  Environment:
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@josephyan/qingflow-app-user-mcp",
3
- "version": "0.2.0-beta.97",
3
+ "version": "0.2.0-beta.981",
4
4
  "description": "Operational end-user MCP for Qingflow records, tasks, comments, and directory workflows.",
5
5
  "license": "MIT",
6
6
  "type": "module",
package/pyproject.toml CHANGED
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "qingflow-mcp"
7
- version = "0.2.0b97"
7
+ version = "0.2.0b981"
8
8
  description = "User-authenticated MCP server for Qingflow"
9
9
  readme = "README.md"
10
10
  license = "MIT"
@@ -5,7 +5,7 @@ from pathlib import Path
5
5
 
6
6
  __all__ = ["__version__"]
7
7
 
8
- _FALLBACK_VERSION = "0.2.0b97"
8
+ _FALLBACK_VERSION = "0.2.0b981"
9
9
 
10
10
 
11
11
  def _resolve_local_pyproject_version() -> str | None:
@@ -445,6 +445,7 @@ class AiBuilderFacade:
445
445
  base = base_result.get("result") if isinstance(base_result.get("result"), dict) else {}
446
446
  summary = detail_result.get("summary") if isinstance(detail_result, dict) and isinstance(detail_result.get("summary"), dict) else {}
447
447
  source = detail if detail else base
448
+ layout_tag_items = _select_package_layout_tag_items(detail=detail, base=base)
448
449
  warnings: list[JSONObject] = []
449
450
  if detail_read_error is not None:
450
451
  warnings.append(
@@ -455,7 +456,7 @@ class AiBuilderFacade:
455
456
  "http_status": detail_read_error.http_status,
456
457
  }
457
458
  )
458
- public_items = _public_package_items_from_tag_items(source.get("tagItems") or base.get("tagItems"))
459
+ public_items = _public_package_items_from_tag_items(layout_tag_items)
459
460
  item_count = summary.get("itemCount")
460
461
  if not isinstance(item_count, int) or item_count < 0 or (item_count == 0 and public_items):
461
462
  item_count = len(public_items)
@@ -886,9 +887,7 @@ class AiBuilderFacade:
886
887
  if isinstance(current_base_result, dict) and isinstance(current_base_result.get("result"), dict)
887
888
  else {}
888
889
  )
889
- detail_tag_items = detail_raw.get("tagItems") if isinstance(detail_raw.get("tagItems"), list) else None
890
- base_tag_items = base_raw.get("tagItems") if isinstance(base_raw.get("tagItems"), list) else None
891
- raw_tag_items = detail_tag_items if detail_tag_items else base_tag_items
890
+ raw_tag_items = _select_package_layout_tag_items(detail=detail_raw, base=base_raw)
892
891
  if not isinstance(raw_tag_items, list):
893
892
  return _failed(
894
893
  "PACKAGE_LAYOUT_UNREADABLE",
@@ -12485,6 +12484,26 @@ def _public_package_items_from_tag_items(tag_items: Any) -> list[JSONObject]:
12485
12484
  return public_items
12486
12485
 
12487
12486
 
12487
+ def _select_package_layout_tag_items(*, detail: Any, base: Any) -> list[Any] | None:
12488
+ base_tag_items = base.get("tagItems") if isinstance(base, dict) and isinstance(base.get("tagItems"), list) else None
12489
+ detail_tag_items = detail.get("tagItems") if isinstance(detail, dict) and isinstance(detail.get("tagItems"), list) else None
12490
+ if _package_tag_items_include_groups(base_tag_items):
12491
+ return deepcopy(base_tag_items)
12492
+ if _package_tag_items_include_groups(detail_tag_items):
12493
+ return deepcopy(detail_tag_items)
12494
+ if detail_tag_items is not None:
12495
+ return deepcopy(detail_tag_items)
12496
+ if base_tag_items is not None:
12497
+ return deepcopy(base_tag_items)
12498
+ return None
12499
+
12500
+
12501
+ def _package_tag_items_include_groups(tag_items: Any) -> bool:
12502
+ if not isinstance(tag_items, list):
12503
+ return False
12504
+ return any(isinstance(item, dict) and _coerce_positive_int(item.get("itemType")) == 3 for item in tag_items)
12505
+
12506
+
12488
12507
  def _flatten_package_resource_identities(items: Any, *, public: bool) -> set[tuple[str, str]]:
12489
12508
  flattened: set[tuple[str, str]] = set()
12490
12509
 
@@ -177,10 +177,10 @@ def _format_task_list(result: dict[str, Any]) -> str:
177
177
  str(item.get("app_key") or ""),
178
178
  str(item.get("record_id") or ""),
179
179
  str(item.get("workflow_node_id") or ""),
180
- str(item.get("title") or item.get("task_name") or ""),
180
+ str(item.get("workflow_node_name") or ""),
181
181
  ]
182
182
  )
183
- output = _render_titled_table("Tasks", ["app_key", "record_id", "node_id", "title"], rows)
183
+ output = _render_titled_table("Tasks", ["app_key", "record_id", "node_id", "node_name"], rows)
184
184
  lines = output.rstrip("\n").split("\n")
185
185
  _append_warnings(lines, result.get("warnings"))
186
186
  return "\n".join(lines) + "\n"
@@ -189,34 +189,48 @@ def _format_task_list(result: dict[str, Any]) -> str:
189
189
  def _format_task_get(result: dict[str, Any]) -> str:
190
190
  data = result.get("data") if isinstance(result.get("data"), dict) else {}
191
191
  task = data.get("task") if isinstance(data.get("task"), dict) else {}
192
- record = data.get("record") if isinstance(data.get("record"), dict) else {}
193
- capabilities = data.get("capabilities") if isinstance(data.get("capabilities"), dict) else {}
194
- update_schema = data.get("update_schema") if isinstance(data.get("update_schema"), dict) else {}
195
- writable_fields = update_schema.get("writable_fields") if isinstance(update_schema.get("writable_fields"), list) else []
192
+ record_summary = data.get("record_summary") if isinstance(data.get("record_summary"), dict) else {}
193
+ editable_fields = data.get("editable_fields") if isinstance(data.get("editable_fields"), list) else []
194
+ available_actions = data.get("available_actions") if isinstance(data.get("available_actions"), list) else []
195
+ extras = data.get("extras") if isinstance(data.get("extras"), dict) else {}
196
+ initiator = task.get("initiator") if isinstance(task.get("initiator"), dict) else {}
197
+ initiator_label = initiator.get("displayName") or initiator.get("email") or "-"
196
198
  lines = [
197
199
  f"Task: {task.get('app_key') or '-'} / {task.get('record_id') or '-'} / {task.get('workflow_node_id') or '-'}",
198
200
  f"Node: {task.get('workflow_node_name') or '-'}",
199
- f"Apply Status: {record.get('apply_status')}",
200
- f"Available Actions: {', '.join(str(item) for item in (capabilities.get('available_actions') or [])) or '-'}",
201
- f"Editable Fields: {len(writable_fields)}",
201
+ f"App: {task.get('app_name') or '-'}",
202
+ f"Initiator: {initiator_label}",
203
+ f"Apply Status: {record_summary.get('apply_status')}",
204
+ f"Available Actions: {', '.join(str(item) for item in available_actions) or '-'}",
205
+ f"Editable Fields: {len(editable_fields)}",
202
206
  ]
203
- if writable_fields:
204
- for item in writable_fields[:10]:
207
+ core_fields = record_summary.get("core_fields") if isinstance(record_summary.get("core_fields"), dict) else {}
208
+ if core_fields:
209
+ lines.append("Core Fields:")
210
+ for key, value in list(core_fields.items())[:12]:
211
+ lines.append(f"- {key}: {value}")
212
+ if editable_fields:
213
+ lines.append("Editable Fields:")
214
+ for item in editable_fields[:10]:
205
215
  if isinstance(item, dict):
206
216
  lines.append(f"- {item.get('title') or '-'} ({item.get('kind') or 'field'})")
207
- blockers = update_schema.get("blockers") if isinstance(update_schema.get("blockers"), list) else []
208
- if blockers:
209
- lines.append("Update Schema Blockers:")
210
- for item in blockers:
211
- lines.append(f"- {item}")
212
- schema_warnings = update_schema.get("warnings") if isinstance(update_schema.get("warnings"), list) else []
213
- if schema_warnings:
214
- lines.append("Update Schema Warnings:")
215
- for item in schema_warnings:
217
+ associated_reports = extras.get("associated_reports") if isinstance(extras.get("associated_reports"), dict) else {}
218
+ rollback_candidates = extras.get("rollback_candidates") if isinstance(extras.get("rollback_candidates"), dict) else {}
219
+ transfer_candidates = extras.get("transfer_candidates") if isinstance(extras.get("transfer_candidates"), dict) else {}
220
+ lines.append(
221
+ "Extras: "
222
+ f"reports={associated_reports.get('count', 0)}, "
223
+ f"rollback={rollback_candidates.get('count', 0)}, "
224
+ f"transfer={transfer_candidates.get('count', 0)}"
225
+ )
226
+ transfer_items = transfer_candidates.get("items") if isinstance(transfer_candidates.get("items"), list) else []
227
+ if transfer_items:
228
+ lines.append("Transfer Candidates:")
229
+ for item in transfer_items:
216
230
  if isinstance(item, dict):
217
- lines.append(f"- {item.get('code') or 'WARNING'}: {item.get('message') or ''}".rstrip())
218
- else:
219
- lines.append(f"- {item}")
231
+ display = item.get("name") or item.get("uid") or item
232
+ suffix = f" <{item.get('email')}>" if item.get("email") else ""
233
+ lines.append(f"- {display}{suffix} (uid={item.get('uid') or '-'})")
220
234
  _append_warnings(lines, result.get("warnings"))
221
235
  return "\n".join(lines) + "\n"
222
236
 
@@ -226,17 +240,22 @@ def _format_import_verify(result: dict[str, Any]) -> str:
226
240
  f"App Key: {result.get('app_key') or '-'}",
227
241
  f"File: {result.get('file_name') or result.get('file_path') or '-'}",
228
242
  f"Can Import: {result.get('can_import')}",
229
- f"Apply Rows: {result.get('apply_rows')}",
230
243
  f"Verification ID: {result.get('verification_id') or '-'}",
231
244
  ]
232
- issues = result.get("issues") if isinstance(result.get("issues"), list) else []
233
- if issues:
234
- lines.append("Issues:")
235
- for issue in issues:
236
- if isinstance(issue, dict):
237
- lines.append(f"- {issue.get('code') or 'ISSUE'}: {issue.get('message') or issue}")
238
- else:
239
- lines.append(f"- {issue}")
245
+ issue_summary = result.get("issue_summary") if isinstance(result.get("issue_summary"), dict) else {}
246
+ if issue_summary:
247
+ lines.append(
248
+ "Issues: "
249
+ f"total={issue_summary.get('total', 0)}, "
250
+ f"errors={issue_summary.get('errors', 0)}, "
251
+ f"warnings={issue_summary.get('warnings', 0)}"
252
+ )
253
+ sample = issue_summary.get("sample") if isinstance(issue_summary.get("sample"), list) else []
254
+ if sample:
255
+ lines.append("Issue Samples:")
256
+ for item in sample:
257
+ if isinstance(item, dict):
258
+ lines.append(f"- {item.get('code') or 'ISSUE'}: {item.get('message') or ''}".rstrip())
240
259
  _append_warnings(lines, result.get("warnings"))
241
260
  _append_verification(lines, result.get("verification"))
242
261
  return "\n".join(lines) + "\n"
@@ -247,8 +266,9 @@ def _format_import_status(result: dict[str, Any]) -> str:
247
266
  f"Status: {result.get('status') or '-'}",
248
267
  f"Import ID: {result.get('import_id') or '-'}",
249
268
  f"Process ID: {result.get('process_id_str') or '-'}",
250
- f"Success Rows: {result.get('success_rows') or 0}",
251
- f"Failed Rows: {result.get('failed_rows') or 0}",
269
+ f"Total Rows: {result.get('total') or 0}",
270
+ f"Finished Rows: {result.get('finished') or 0}",
271
+ f"Failed Rows: {result.get('failed') or 0}",
252
272
  f"Progress: {result.get('progress') or '-'}",
253
273
  ]
254
274
  _append_warnings(lines, result.get("warnings"))
@@ -293,34 +293,150 @@ def _trim_file_upload_local(payload: JSONObject) -> None:
293
293
 
294
294
 
295
295
  def _trim_import_schema(payload: JSONObject) -> None:
296
- pass
296
+ columns: list[JSONObject] | None = None
297
+ if isinstance(payload.get("columns"), list):
298
+ columns = [item for item in payload.get("columns", []) if isinstance(item, dict)]
299
+ elif isinstance(payload.get("expected_columns"), list):
300
+ columns = [item for item in payload.get("expected_columns", []) if isinstance(item, dict)]
301
+ if columns is not None:
302
+ payload["columns"] = [_compact_import_column(item) for item in columns]
303
+ payload.pop("expected_columns", None)
304
+ payload.pop("schema_fingerprint", None)
305
+ payload.pop("import_capability", None)
306
+ payload.pop("request_route", None)
307
+ payload.pop("verification", None)
308
+
309
+ if _looks_like_import_verify(payload):
310
+ _trim_import_verify_payload(payload)
311
+ return
312
+ if "applied_repairs" in payload or "repaired_file_path" in payload:
313
+ _trim_import_repair_payload(payload)
314
+ return
315
+ if "template_url" in payload or "downloaded_to_path" in payload:
316
+ _trim_import_template_payload(payload)
317
+ return
318
+ if "import_id" in payload or "process_id_str" in payload:
319
+ _trim_import_status_payload(payload)
320
+ return
297
321
 
298
322
 
299
323
  def _trim_record_schema(payload: JSONObject) -> None:
300
324
  payload.pop("legacy_schema", None)
325
+ template_map = payload.get("payload_template")
326
+ if not isinstance(template_map, dict):
327
+ template_map = None
328
+
329
+ if "writable_fields" in payload:
330
+ writable_fields = payload.get("writable_fields")
331
+ payload.pop("writable_fields", None)
332
+ required_fields: list[JSONObject] = []
333
+ optional_fields: list[JSONObject] = []
334
+ if isinstance(writable_fields, list):
335
+ for item in writable_fields:
336
+ compact = _compact_schema_field(item, template_map=template_map)
337
+ if not compact:
338
+ continue
339
+ if compact.get("required") is True:
340
+ required_fields.append(compact)
341
+ else:
342
+ optional_fields.append(compact)
343
+ payload["required_fields"] = required_fields
344
+ payload["optional_fields"] = optional_fields
345
+
346
+ for key in ("required_fields", "optional_fields", "runtime_linked_required_fields", "fields"):
347
+ if key in payload:
348
+ payload[key] = _compact_schema_fields(payload.get(key), template_map=template_map)
349
+
350
+ for key in ("suggested_dimensions", "suggested_metrics", "suggested_time_fields"):
351
+ if isinstance(payload.get(key), list):
352
+ payload[key] = [
353
+ _pick(item, ("field_id", "title")) for item in payload.get(key) if isinstance(item, dict)
354
+ ]
355
+
356
+ for key in ("workflow_node", "view_resolution", "field_count", "record_context_probe", "view_probe_summary", "ambiguous_fields"):
357
+ payload.pop(key, None)
301
358
 
302
359
 
303
360
  def _trim_record_write(payload: JSONObject) -> None:
304
361
  data = payload.get("data")
305
362
  if not isinstance(data, dict):
306
363
  return
364
+ data.pop("debug", None)
307
365
  data.pop("normalized_payload", None)
308
366
  data.pop("human_review", None)
309
367
  data.pop("action", None)
368
+ resource = _compact_record_resource(data.get("resource"))
369
+ if resource:
370
+ data["resource"] = resource
371
+ else:
372
+ data.pop("resource", None)
310
373
 
311
374
 
312
375
  def _trim_record_get(payload: JSONObject) -> None:
313
- _keep_nested_keys(payload, ("data", "selection", "view"), allowed=("view_id", "name"))
314
- _drop_nested_keys(payload, ("data", "selection"), keys=("columns", "workflow_node_id"))
376
+ data = payload.get("data")
377
+ if not isinstance(data, dict):
378
+ return
379
+ compact: dict[str, Any] = {}
380
+ app_key = data.get("app_key")
381
+ if app_key:
382
+ compact["app_key"] = app_key
383
+ record_id = data.get("record_id")
384
+ if record_id not in (None, ""):
385
+ compact["record_id"] = str(record_id)
386
+ record = data.get("record")
387
+ if isinstance(record, dict):
388
+ compact["record"] = record
389
+ payload["data"] = compact
315
390
 
316
391
 
317
392
  def _trim_record_list(payload: JSONObject) -> None:
318
- _keep_nested_keys(payload, ("data", "selection", "view"), allowed=("view_id", "name"))
319
- _drop_nested_keys(payload, ("data", "selection"), keys=("columns",))
393
+ data = payload.get("data")
394
+ if not isinstance(data, dict):
395
+ return
396
+ pagination = data.get("pagination") if isinstance(data.get("pagination"), dict) else {}
397
+ returned_items = pagination.get("returned_items")
398
+ result_amount = pagination.get("result_amount")
399
+ limit = pagination.get("limit")
400
+ truncated = False
401
+ if isinstance(result_amount, int) and isinstance(returned_items, int):
402
+ truncated = result_amount > returned_items
403
+ compact_pagination = {
404
+ "loaded": True,
405
+ "page_size": limit,
406
+ "fetched_pages": 1,
407
+ "reported_total": result_amount,
408
+ "truncated": truncated,
409
+ }
410
+ selection = data.get("selection") if isinstance(data.get("selection"), dict) else {}
411
+ view = selection.get("view") if isinstance(selection.get("view"), dict) else {}
412
+ compact: dict[str, Any] = {
413
+ "app_key": data.get("app_key"),
414
+ "items": data.get("items") if isinstance(data.get("items"), list) else [],
415
+ "pagination": compact_pagination,
416
+ }
417
+ if view:
418
+ compact["view"] = _pick(view, ("view_id", "name"))
419
+ payload["data"] = compact
320
420
 
321
421
 
322
422
  def _trim_record_analyze(payload: JSONObject) -> None:
323
- _drop_deep_keys(payload, {"debug"})
423
+ summary: dict[str, Any] = {}
424
+ completeness = payload.get("completeness")
425
+ if isinstance(completeness, dict):
426
+ summary["completeness"] = completeness
427
+ presentation = payload.get("presentation")
428
+ if isinstance(presentation, dict):
429
+ summary["presentation"] = presentation
430
+ ranking = payload.get("ranking")
431
+ if isinstance(ranking, dict):
432
+ summary["ranking"] = ranking
433
+ error = payload.get("error")
434
+ if isinstance(error, dict):
435
+ summary["error"] = error
436
+ if summary:
437
+ payload["summary"] = summary
438
+ for key in ("query", "ranking", "ratios", "completeness", "presentation", "error", "debug"):
439
+ payload.pop(key, None)
324
440
 
325
441
 
326
442
  def _trim_code_block_schema(payload: JSONObject) -> None:
@@ -340,6 +456,211 @@ def _trim_task_get(payload: JSONObject) -> None:
340
456
  _drop_deep_keys(payload, {"request_route", "output_profile"})
341
457
 
342
458
 
459
+ def _trim_task_context_detail(payload: JSONObject) -> None:
460
+ _drop_deep_keys(payload, {"request_route", "output_profile"})
461
+
462
+
463
+ def _trim_record_delete(payload: JSONObject) -> None:
464
+ data = payload.get("data")
465
+ if not isinstance(data, dict):
466
+ return
467
+ resource = data.get("resource")
468
+ deleted_ids: list[str] = []
469
+ if isinstance(resource, dict):
470
+ raw_ids = resource.get("record_ids") or resource.get("apply_ids") or resource.get("applyIds")
471
+ if isinstance(raw_ids, list):
472
+ deleted_ids = [str(item) for item in raw_ids if item not in (None, "")]
473
+ data["deleted_ids"] = deleted_ids
474
+ data.setdefault("failed_ids", [])
475
+ for key in (
476
+ "resource",
477
+ "action",
478
+ "normalized_payload",
479
+ "human_review",
480
+ "verification",
481
+ "blockers",
482
+ "field_errors",
483
+ "confirmation_requests",
484
+ "resolved_fields",
485
+ "debug",
486
+ ):
487
+ data.pop(key, None)
488
+
489
+
490
+ def _compact_record_resource(resource: Any) -> dict[str, Any] | None:
491
+ if not isinstance(resource, dict):
492
+ return None
493
+ compact: dict[str, Any] = {}
494
+ if resource.get("type") not in (None, ""):
495
+ compact["type"] = resource.get("type")
496
+ app_key = resource.get("app_key") or resource.get("appKey")
497
+ if app_key not in (None, ""):
498
+ compact["app_key"] = app_key
499
+ record_id = resource.get("record_id")
500
+ if record_id not in (None, ""):
501
+ compact["record_id"] = str(record_id)
502
+ apply_id = resource.get("apply_id") or resource.get("applyId")
503
+ if apply_id not in (None, "") and "record_id" not in compact:
504
+ compact["record_id"] = str(apply_id)
505
+ record_ids = resource.get("record_ids")
506
+ if isinstance(record_ids, list):
507
+ compact["record_ids"] = [str(item) for item in record_ids if item not in (None, "")]
508
+ apply_ids = resource.get("apply_ids") or resource.get("applyIds")
509
+ if isinstance(apply_ids, list) and "record_ids" not in compact:
510
+ compact["record_ids"] = [str(item) for item in apply_ids if item not in (None, "")]
511
+ return compact or None
512
+
513
+
514
+ def _compact_schema_fields(items: Any, *, template_map: dict[str, Any] | None) -> list[JSONObject]:
515
+ if not isinstance(items, list):
516
+ return []
517
+ compacted: list[JSONObject] = []
518
+ for item in items:
519
+ compact = _compact_schema_field(item, template_map=template_map)
520
+ if compact:
521
+ compacted.append(compact)
522
+ return compacted
523
+
524
+
525
+ def _compact_schema_field(item: Any, *, template_map: dict[str, Any] | None) -> JSONObject | None:
526
+ if not isinstance(item, dict):
527
+ return None
528
+ compact: dict[str, Any] = {}
529
+ field_id = item.get("field_id")
530
+ if field_id not in (None, ""):
531
+ compact["field_id"] = field_id
532
+ title = item.get("title")
533
+ if title not in (None, ""):
534
+ compact["title"] = title
535
+ kind = item.get("kind") or item.get("write_kind")
536
+ if kind not in (None, ""):
537
+ compact["kind"] = kind
538
+ if "required" in item:
539
+ compact["required"] = bool(item.get("required"))
540
+ if template_map is not None and isinstance(title, str) and title in template_map:
541
+ compact["template"] = template_map.get(title)
542
+ candidate_hint = item.get("candidate_hint")
543
+ if isinstance(candidate_hint, dict):
544
+ compact["candidate_hint"] = candidate_hint
545
+ options = item.get("options")
546
+ if isinstance(options, list) and options:
547
+ compact["options"] = options
548
+ target_app_key = item.get("target_app_key")
549
+ if isinstance(target_app_key, str) and target_app_key:
550
+ compact["target_app_key"] = target_app_key
551
+ searchable_fields = item.get("searchable_fields")
552
+ if isinstance(searchable_fields, list) and searchable_fields:
553
+ compact["searchable_fields"] = searchable_fields
554
+ row_fields = item.get("row_fields")
555
+ if isinstance(row_fields, list) and row_fields:
556
+ compact["row_fields"] = _compact_schema_fields(row_fields, template_map=None)
557
+ return compact or None
558
+
559
+
560
+ def _compact_import_column(item: dict[str, Any]) -> dict[str, Any]:
561
+ compact: dict[str, Any] = {
562
+ "title": item.get("title"),
563
+ "kind": item.get("kind") or item.get("write_kind"),
564
+ "required": bool(item.get("required")),
565
+ }
566
+ options = item.get("options")
567
+ if isinstance(options, list) and options:
568
+ compact["options"] = options
569
+ if bool(item.get("accepts_natural_input")):
570
+ compact["accepts_natural_input"] = True
571
+ if bool(item.get("requires_upload")):
572
+ compact["requires_upload"] = True
573
+ target_app_key = item.get("target_app_key")
574
+ if isinstance(target_app_key, str) and target_app_key:
575
+ compact["target_app_key"] = target_app_key
576
+ target_app_name = item.get("target_app_name")
577
+ if isinstance(target_app_name, str) and target_app_name:
578
+ compact["target_app_name"] = target_app_name
579
+ searchable_fields = item.get("searchable_fields")
580
+ if isinstance(searchable_fields, list) and searchable_fields:
581
+ compact["searchable_fields"] = searchable_fields
582
+ return compact
583
+
584
+
585
+ def _looks_like_import_verify(payload: JSONObject) -> bool:
586
+ return "verification_id" in payload and "can_import" in payload
587
+
588
+
589
+ def _trim_import_verify_payload(payload: JSONObject) -> None:
590
+ issues = payload.get("issues") if isinstance(payload.get("issues"), list) else []
591
+ issue_summary = _summarize_import_issues(issues)
592
+ payload["issue_summary"] = issue_summary
593
+ file_name = payload.get("file_name")
594
+ if not file_name:
595
+ file_path = payload.get("file_path")
596
+ if isinstance(file_path, str) and file_path:
597
+ payload["file_name"] = file_path.split("/")[-1]
598
+ for key in ("issues", "apply_rows", "expected_columns", "schema_fingerprint", "import_capability", "file_path", "file_sha256", "verified_file_sha256", "file_format", "auto_normalized", "local_precheck_limited"):
599
+ payload.pop(key, None)
600
+
601
+
602
+ def _trim_import_repair_payload(payload: JSONObject) -> None:
603
+ payload["verification_id"] = payload.get("new_verification_id") or payload.get("verification_id")
604
+ for key in ("new_verification_id", "source_file_path", "repaired_file_path", "post_repair_issues", "verification"):
605
+ payload.pop(key, None)
606
+
607
+
608
+ def _trim_import_template_payload(payload: JSONObject) -> None:
609
+ for key in ("schema_fingerprint", "verification"):
610
+ payload.pop(key, None)
611
+
612
+
613
+ def _trim_import_status_payload(payload: JSONObject) -> None:
614
+ total_rows = payload.get("total_rows")
615
+ success_rows = payload.get("success_rows")
616
+ failed_rows = payload.get("failed_rows")
617
+ payload["total"] = total_rows
618
+ payload["finished"] = success_rows
619
+ payload["failed"] = failed_rows
620
+ for key in (
621
+ "matched_by",
622
+ "source_file_name",
623
+ "total_rows",
624
+ "success_rows",
625
+ "failed_rows",
626
+ "error_file_urls",
627
+ "operate_time",
628
+ "operate_user",
629
+ "verification",
630
+ ):
631
+ payload.pop(key, None)
632
+
633
+
634
+ def _summarize_import_issues(issues: list[Any]) -> dict[str, Any]:
635
+ total = 0
636
+ error_count = 0
637
+ warning_count = 0
638
+ sample: list[dict[str, Any]] = []
639
+ for item in issues:
640
+ if not isinstance(item, dict):
641
+ continue
642
+ total += 1
643
+ severity = str(item.get("severity") or "").lower()
644
+ if severity == "error":
645
+ error_count += 1
646
+ if severity == "warning":
647
+ warning_count += 1
648
+ if len(sample) < 3:
649
+ sample.append(
650
+ {
651
+ "code": item.get("code"),
652
+ "message": item.get("message"),
653
+ "severity": item.get("severity"),
654
+ }
655
+ )
656
+ return {
657
+ "total": total,
658
+ "errors": error_count,
659
+ "warnings": warning_count,
660
+ "sample": sample,
661
+ }
662
+
663
+
343
664
  def _trim_directory(payload: JSONObject) -> None:
344
665
  pass
345
666
 
@@ -409,15 +730,15 @@ _register_policy((USER_DOMAIN,), ("record_list",), _trim_record_list)
409
730
  _register_policy((USER_DOMAIN,), ("record_analyze",), _trim_record_analyze)
410
731
  _register_policy((USER_DOMAIN,), ("record_code_block_run",), _trim_code_block_run)
411
732
  _register_policy((USER_DOMAIN,), ("task_list",), _trim_task_list)
733
+ _register_policy((USER_DOMAIN,), ("task_get",), _trim_task_get)
412
734
  _register_policy(
413
735
  (USER_DOMAIN,),
414
736
  (
415
- "task_get",
416
737
  "task_action_execute",
417
738
  "task_associated_report_detail_get",
418
739
  "task_workflow_log_get",
419
740
  ),
420
- _trim_task_get,
741
+ _trim_task_context_detail,
421
742
  )
422
743
  _register_policy(
423
744
  (USER_DOMAIN,),
@@ -438,10 +759,10 @@ _register_policy(
438
759
  (
439
760
  "record_member_candidates",
440
761
  "record_department_candidates",
441
- "record_delete",
442
762
  ),
443
763
  _trim_builder_list_like,
444
764
  )
765
+ _register_policy((USER_DOMAIN,), ("record_delete",), _trim_record_delete)
445
766
  _register_policy(
446
767
  (BUILDER_DOMAIN,),
447
768
  (
@@ -1025,6 +1025,7 @@ class RecordTools(ToolBase):
1025
1025
  else:
1026
1026
  required = bool(required_override) if required_override is not None else bool(field.required)
1027
1027
  payload: JSONObject = {
1028
+ "field_id": field.que_id,
1028
1029
  "title": field.que_title,
1029
1030
  "kind": kind,
1030
1031
  "required": required,
@@ -1,6 +1,7 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  import json
4
+ import re
4
5
  from typing import Any
5
6
  from uuid import uuid4
6
7
 
@@ -218,6 +219,7 @@ class TaskContextTools(ToolBase):
218
219
  include_associated_reports=include_associated_reports,
219
220
  current_uid=session_profile.uid,
220
221
  )
222
+ data = self._compact_task_get_context(data)
221
223
  return {
222
224
  "profile": profile,
223
225
  "ws_id": session_profile.selected_ws_id,
@@ -1213,8 +1215,9 @@ class TaskContextTools(ToolBase):
1213
1215
  f"/app/{app_key}/apply/{record_id}",
1214
1216
  params={"role": 3, "listType": 1, "auditNodeId": workflow_node_id},
1215
1217
  )
1218
+ app_name = self._task_app_name(detail, node_info)
1216
1219
  associated_report_visible = self._resolve_associated_report_visible(node_info, detail)
1217
- associated_reports = {"visible": associated_report_visible, "count": 0, "items": []}
1220
+ associated_reports = {"visible": associated_report_visible, "loaded": False, "count": 0, "items": []}
1218
1221
  if include_associated_reports and associated_report_visible:
1219
1222
  asos_chart_list = self.backend.request(
1220
1223
  "GET",
@@ -1229,11 +1232,21 @@ class TaskContextTools(ToolBase):
1229
1232
  ]
1230
1233
  associated_reports = {
1231
1234
  "visible": True,
1235
+ "loaded": True,
1232
1236
  "count": len(associated_items),
1233
1237
  "items": associated_items,
1234
1238
  }
1235
1239
  rollback_items: list[dict[str, Any]] = []
1236
1240
  transfer_items: list[dict[str, Any]] = []
1241
+ transfer_warnings: list[JSONObject] = []
1242
+ transfer_pagination: JSONObject = {
1243
+ "loaded": False,
1244
+ "page_size": 100,
1245
+ "fetched_pages": 0,
1246
+ "reported_total": None,
1247
+ "page_amount": None,
1248
+ "truncated": False,
1249
+ }
1237
1250
  if include_candidates:
1238
1251
  rollback_result = self.backend.request(
1239
1252
  "GET",
@@ -1242,13 +1255,13 @@ class TaskContextTools(ToolBase):
1242
1255
  params={"auditNodeId": workflow_node_id},
1243
1256
  )
1244
1257
  rollback_items = self._rollback_candidate_items(rollback_result)
1245
- transfer_result = self.backend.request(
1246
- "GET",
1258
+ transfer_items, transfer_warnings, transfer_pagination = self._transfer_candidate_items(
1247
1259
  context,
1248
- f"/app/{app_key}/apply/{record_id}/transfer/member",
1249
- params={"pageNum": 1, "pageSize": 20, "auditNodeId": workflow_node_id},
1260
+ app_key=app_key,
1261
+ record_id=record_id,
1262
+ workflow_node_id=workflow_node_id,
1263
+ current_uid=current_uid,
1250
1264
  )
1251
- transfer_items = self._filter_transfer_members(_approval_page_items(transfer_result), current_uid=current_uid)
1252
1265
 
1253
1266
  update_schema_state = self._build_task_update_schema(
1254
1267
  profile=profile,
@@ -1275,6 +1288,7 @@ class TaskContextTools(ToolBase):
1275
1288
  return {
1276
1289
  "task": {
1277
1290
  "app_key": app_key,
1291
+ "app_name": app_name,
1278
1292
  "record_id": record_id,
1279
1293
  "workflow_node_id": workflow_node_id,
1280
1294
  "workflow_node_name": node_info.get("auditNodeName") or node_info.get("nodeName"),
@@ -1306,6 +1320,9 @@ class TaskContextTools(ToolBase):
1306
1320
  "candidates": {
1307
1321
  "rollback_nodes": rollback_items,
1308
1322
  "transfer_members": transfer_items,
1323
+ "loaded": include_candidates,
1324
+ "transfer_pagination": transfer_pagination,
1325
+ "warnings": transfer_warnings,
1309
1326
  },
1310
1327
  "workflow_log_summary": {
1311
1328
  "visible": visibility["audit_record_visible"],
@@ -1316,16 +1333,229 @@ class TaskContextTools(ToolBase):
1316
1333
  "update_schema": update_schema,
1317
1334
  }
1318
1335
 
1336
+ def _compact_task_get_context(self, data: dict[str, Any]) -> dict[str, Any]:
1337
+ task = data.get("task") if isinstance(data.get("task"), dict) else {}
1338
+ record = data.get("record") if isinstance(data.get("record"), dict) else {}
1339
+ capabilities = data.get("capabilities") if isinstance(data.get("capabilities"), dict) else {}
1340
+ update_schema = data.get("update_schema") if isinstance(data.get("update_schema"), dict) else {}
1341
+ associated_reports = data.get("associated_reports") if isinstance(data.get("associated_reports"), dict) else {}
1342
+ candidates = data.get("candidates") if isinstance(data.get("candidates"), dict) else {}
1343
+ workflow_log = data.get("workflow_log_summary") if isinstance(data.get("workflow_log_summary"), dict) else {}
1344
+
1345
+ available_actions = [
1346
+ str(item)
1347
+ for item in (capabilities.get("available_actions") or [])
1348
+ if str(item).strip()
1349
+ ]
1350
+ writable_fields = update_schema.get("writable_fields") if isinstance(update_schema.get("writable_fields"), list) else []
1351
+ rollback_items = [
1352
+ self._compact_rollback_candidate(item)
1353
+ for item in (candidates.get("rollback_nodes") or [])
1354
+ if isinstance(item, dict)
1355
+ ]
1356
+ transfer_items = [
1357
+ self._compact_transfer_member(item)
1358
+ for item in (candidates.get("transfer_members") or [])
1359
+ if isinstance(item, dict)
1360
+ ]
1361
+ associated_items = [
1362
+ self._compact_associated_report(item)
1363
+ for item in (associated_reports.get("items") or [])
1364
+ if isinstance(item, dict)
1365
+ ]
1366
+ transfer_pagination = candidates.get("transfer_pagination") if isinstance(candidates.get("transfer_pagination"), dict) else {}
1367
+ compact: dict[str, Any] = {
1368
+ "task": {
1369
+ "app_key": task.get("app_key"),
1370
+ "app_name": task.get("app_name"),
1371
+ "record_id": task.get("record_id"),
1372
+ "workflow_node_id": task.get("workflow_node_id"),
1373
+ "workflow_node_name": task.get("workflow_node_name"),
1374
+ "initiator": self._compact_initiator(record.get("apply_user")),
1375
+ "actionable": task.get("actionable"),
1376
+ },
1377
+ "record_summary": {
1378
+ "apply_status": record.get("apply_status"),
1379
+ "apply_num": record.get("apply_num"),
1380
+ "custom_apply_num": record.get("custom_apply_num"),
1381
+ "apply_time": record.get("apply_time"),
1382
+ "last_update_time": record.get("last_update_time"),
1383
+ "core_fields": self._task_record_core_fields(record.get("answers") or []),
1384
+ },
1385
+ "available_actions": available_actions,
1386
+ "editable_fields": [
1387
+ self._compact_task_editable_field(item, update_schema)
1388
+ for item in writable_fields
1389
+ if isinstance(item, dict)
1390
+ ],
1391
+ "extras": {
1392
+ "workflow_log": {
1393
+ "available": bool(workflow_log.get("available")),
1394
+ "qrobot_log_visible": bool(workflow_log.get("qrobot_log_visible")),
1395
+ "history_count": workflow_log.get("history_count"),
1396
+ },
1397
+ "associated_reports": {
1398
+ "available": bool(associated_reports.get("visible")),
1399
+ "loaded": bool(associated_reports.get("loaded")),
1400
+ "count": len(associated_items),
1401
+ "items": associated_items,
1402
+ },
1403
+ "rollback_candidates": {
1404
+ "available": "rollback" in available_actions,
1405
+ "loaded": bool(candidates.get("loaded")),
1406
+ "count": len(rollback_items),
1407
+ "items": rollback_items,
1408
+ },
1409
+ "transfer_candidates": {
1410
+ "available": "transfer" in available_actions,
1411
+ "loaded": bool(transfer_pagination.get("loaded")),
1412
+ "count": len(transfer_items),
1413
+ "items": transfer_items,
1414
+ "pagination": transfer_pagination,
1415
+ "warnings": candidates.get("warnings") or [],
1416
+ },
1417
+ },
1418
+ }
1419
+ return compact
1420
+
1421
+ def _compact_task_action_metadata(self, capabilities: dict[str, Any]) -> dict[str, Any]:
1422
+ constraints = capabilities.get("action_constraints") if isinstance(capabilities.get("action_constraints"), dict) else {}
1423
+ metadata: dict[str, Any] = {}
1424
+ feedback_required_for = constraints.get("feedback_required_for") if isinstance(constraints.get("feedback_required_for"), list) else []
1425
+ if feedback_required_for:
1426
+ metadata["feedback_required_for"] = feedback_required_for
1427
+ visible_but_unimplemented = capabilities.get("visible_but_unimplemented_actions")
1428
+ if visible_but_unimplemented:
1429
+ metadata["visible_but_unimplemented_actions"] = visible_but_unimplemented
1430
+ if capabilities.get("save_only_source"):
1431
+ metadata["save_only_source"] = capabilities.get("save_only_source")
1432
+ if capabilities.get("warnings"):
1433
+ metadata["warnings"] = capabilities.get("warnings")
1434
+ return metadata
1435
+
1436
+ def _compact_task_editable_metadata(self, update_schema: dict[str, Any]) -> dict[str, Any]:
1437
+ metadata: dict[str, Any] = {}
1438
+ blockers = update_schema.get("blockers") if isinstance(update_schema.get("blockers"), list) else []
1439
+ warnings = update_schema.get("warnings") if isinstance(update_schema.get("warnings"), list) else []
1440
+ if blockers:
1441
+ metadata["blockers"] = blockers
1442
+ if warnings:
1443
+ metadata["warnings"] = warnings
1444
+ return metadata
1445
+
1446
+ def _compact_initiator(self, payload: Any) -> dict[str, Any] | None:
1447
+ if not isinstance(payload, dict):
1448
+ return None
1449
+ compact = {
1450
+ "uid": payload.get("uid"),
1451
+ "displayName": payload.get("displayName") or payload.get("name") or payload.get("nickName"),
1452
+ "email": payload.get("email"),
1453
+ "mobile": payload.get("mobile"),
1454
+ "headImg": payload.get("headImg"),
1455
+ }
1456
+ return {key: value for key, value in compact.items() if value not in (None, "", [])} or None
1457
+
1458
+ def _task_app_name(self, detail: dict[str, Any], node_info: dict[str, Any]) -> Any:
1459
+ for source in (detail, node_info):
1460
+ for key in ("formTitle", "appName", "worksheetName", "appTitle"):
1461
+ value = source.get(key)
1462
+ if value not in (None, ""):
1463
+ return value
1464
+ return None
1465
+
1466
+ def _task_record_core_fields(self, answers: Any, *, limit: int = 12) -> dict[str, Any]:
1467
+ if not isinstance(answers, list):
1468
+ return {}
1469
+ core_fields: dict[str, Any] = {}
1470
+ for answer in answers:
1471
+ if not isinstance(answer, dict):
1472
+ continue
1473
+ title = answer.get("queTitle") or answer.get("title") or answer.get("fieldName")
1474
+ if not title:
1475
+ que_id = answer.get("queId")
1476
+ title = f"field_{que_id}" if que_id not in (None, "") else None
1477
+ if not title:
1478
+ continue
1479
+ table_values = answer.get("tableValues") if isinstance(answer.get("tableValues"), list) else []
1480
+ if table_values:
1481
+ value: Any = f"子表格 {len(table_values)} 行"
1482
+ else:
1483
+ values = self._extract_answer_values(answer)
1484
+ if not values:
1485
+ continue
1486
+ value = values[0] if len(values) == 1 else values
1487
+ if value in (None, "", []):
1488
+ continue
1489
+ core_fields[str(title)] = self._compact_task_value(value)
1490
+ if len(core_fields) >= limit:
1491
+ break
1492
+ return core_fields
1493
+
1494
+ def _compact_task_value(self, value: Any) -> Any:
1495
+ if isinstance(value, list):
1496
+ return [self._compact_task_value(item) for item in value[:8]]
1497
+ text = re.sub(r"<[^>]+>", " ", str(value))
1498
+ text = re.sub(r"\s+", " ", text).strip()
1499
+ if len(text) <= 160:
1500
+ return text
1501
+ return text[:157].rstrip() + "..."
1502
+
1503
+ def _compact_task_editable_field(self, field: dict[str, Any], update_schema: dict[str, Any]) -> dict[str, Any]:
1504
+ payload_template = update_schema.get("payload_template") if isinstance(update_schema.get("payload_template"), dict) else {}
1505
+ title = field.get("title")
1506
+ compact: dict[str, Any] = {}
1507
+ for key in ("field_id", "title", "kind", "required", "candidate_hint"):
1508
+ if key in field:
1509
+ compact[key] = field.get(key)
1510
+ if title in payload_template:
1511
+ compact["template"] = payload_template.get(title)
1512
+ return compact
1513
+
1514
+ def _compact_associated_report(self, item: dict[str, Any]) -> dict[str, Any]:
1515
+ return {
1516
+ key: value
1517
+ for key, value in {
1518
+ "report_id": item.get("report_id"),
1519
+ "chart_key": item.get("chart_key"),
1520
+ "chart_name": item.get("chart_name"),
1521
+ "graph_type": item.get("graph_type"),
1522
+ "source_type": item.get("source_type"),
1523
+ "target_app_key": item.get("target_app_key"),
1524
+ "target_app_name": item.get("target_app_name"),
1525
+ }.items()
1526
+ if value not in (None, "", [])
1527
+ }
1528
+
1529
+ def _compact_rollback_candidate(self, item: dict[str, Any]) -> dict[str, Any]:
1530
+ return {
1531
+ key: value
1532
+ for key, value in {
1533
+ "workflow_node_id": item.get("auditNodeId") or item.get("nodeId"),
1534
+ "workflow_node_name": item.get("auditNodeName") or item.get("nodeName"),
1535
+ }.items()
1536
+ if value not in (None, "", [])
1537
+ }
1538
+
1539
+ def _compact_transfer_member(self, item: dict[str, Any]) -> dict[str, Any]:
1540
+ uid = item.get("uid")
1541
+ if uid is None:
1542
+ uid = item.get("userId") or item.get("memberId") or item.get("id")
1543
+ return {
1544
+ key: value
1545
+ for key, value in {
1546
+ "uid": uid,
1547
+ "name": item.get("name") or item.get("userName") or item.get("memberName") or item.get("realName"),
1548
+ "email": item.get("email") or item.get("mail"),
1549
+ "department_id": item.get("departmentId") or item.get("deptId"),
1550
+ "department_name": item.get("departmentName") or item.get("deptName"),
1551
+ }.items()
1552
+ if value not in (None, "", [])
1553
+ }
1554
+
1319
1555
  def _normalize_task_item(self, raw: dict[str, Any], *, task_box: str, flow_status: str) -> dict[str, Any]:
1320
1556
  app_key = raw.get("appKey") or raw.get("app_key")
1321
1557
  record_id = raw.get("rowRecordId") or raw.get("recordId") or raw.get("applyId")
1322
1558
  workflow_node_id = raw.get("nodeId") or raw.get("auditNodeId")
1323
- apply_user = raw.get("applyUser")
1324
- if apply_user is None:
1325
- user_uid = raw.get("applyUserUid")
1326
- user_name = raw.get("applyUserName")
1327
- if user_uid is not None or user_name is not None:
1328
- apply_user = {"uid": user_uid, "name": user_name}
1329
1559
  return {
1330
1560
  "task_id": raw.get("id") or raw.get("taskId") or record_id,
1331
1561
  "app_key": app_key,
@@ -1333,8 +1563,6 @@ class TaskContextTools(ToolBase):
1333
1563
  "record_id": record_id,
1334
1564
  "workflow_node_id": workflow_node_id,
1335
1565
  "workflow_node_name": raw.get("nodeName") or raw.get("auditNodeName"),
1336
- "title": raw.get("title") or raw.get("applyTitle") or raw.get("name") or raw.get("formTitle"),
1337
- "apply_user": apply_user,
1338
1566
  "apply_time": raw.get("applyTime") or raw.get("receiveTime"),
1339
1567
  "task_box": task_box,
1340
1568
  "flow_status": flow_status,
@@ -1490,16 +1718,16 @@ class TaskContextTools(ToolBase):
1490
1718
  write_hints = self._record_tools._schema_write_hints(editable_field)
1491
1719
  if not bool(write_hints.get("writable")):
1492
1720
  continue
1493
- writable_fields.append(
1494
- self._record_tools._ready_schema_field_payload(
1495
- profile,
1496
- context,
1497
- editable_field,
1498
- ws_id=context.ws_id,
1499
- required_override=False,
1500
- linkage_payloads_by_field_id=linkage_payloads_by_field_id,
1501
- )
1721
+ writable_field = self._record_tools._ready_schema_field_payload(
1722
+ profile,
1723
+ context,
1724
+ editable_field,
1725
+ ws_id=context.ws_id,
1726
+ required_override=False,
1727
+ linkage_payloads_by_field_id=linkage_payloads_by_field_id,
1502
1728
  )
1729
+ writable_field.setdefault("field_id", editable_field.que_id)
1730
+ writable_fields.append(writable_field)
1503
1731
  blockers: list[str] = []
1504
1732
  if not writable_fields:
1505
1733
  blockers.append("NO_TASK_EDITABLE_FIELDS")
@@ -1879,11 +2107,85 @@ class TaskContextTools(ToolBase):
1879
2107
  for item in items:
1880
2108
  if not isinstance(item, dict):
1881
2109
  continue
1882
- if current_uid is not None and item.get("uid") == current_uid:
2110
+ uid = _coerce_count(item.get("uid") or item.get("userId") or item.get("memberId") or item.get("id"))
2111
+ if current_uid is not None and uid == current_uid:
1883
2112
  continue
1884
2113
  filtered.append(item)
1885
2114
  return filtered
1886
2115
 
2116
    def _transfer_candidate_items(
        self,
        context: BackendRequestContext,
        *,
        app_key: str,
        record_id: int,
        workflow_node_id: int,
        current_uid: int | None,
    ) -> tuple[list[dict[str, Any]], list[JSONObject], JSONObject]:
        """Fetch, filter and dedupe all transfer candidates for a task node.

        Pages through the backend's transfer-member endpoint (100 items per
        page, capped at 100 pages as a safety limit) and returns
        ``(items, warnings, pagination)`` where *items* excludes the current
        user and duplicates, *warnings* is non-empty only when the page cap
        was hit, and *pagination* records what was fetched.
        """
        page_size = 100
        max_pages = 100  # safety cap so a lying backend cannot loop forever
        page_num = 1
        fetched_pages = 0
        # Counts raw (pre-filter) items so it can be compared against the
        # backend-reported total as a termination condition.
        fetched_raw_count = 0
        page_amount: int | None = None
        reported_total: int | None = None
        items: list[dict[str, Any]] = []
        seen_member_keys: set[str] = set()
        warnings: list[JSONObject] = []

        while page_num <= max_pages:
            result = self.backend.request(
                "GET",
                context,
                f"/app/{app_key}/apply/{record_id}/transfer/member",
                params={"pageNum": page_num, "pageSize": page_size, "auditNodeId": workflow_node_id},
            )
            fetched_pages += 1
            raw_items = _approval_page_items(result)
            fetched_raw_count += len(raw_items)
            # Page-count / total metadata is taken from the first response
            # that carries it and then kept fixed.
            if page_amount is None:
                page_amount = _coerce_count(_approval_page_amount(result))
            if reported_total is None:
                reported_total = _coerce_count(_approval_page_total(result))
            # Drop the current user, then dedupe across pages by member key.
            for item in self._filter_transfer_members(raw_items, current_uid=current_uid):
                member_key = self._transfer_member_dedupe_key(item)
                if member_key in seen_member_keys:
                    continue
                seen_member_keys.add(member_key)
                items.append(item)
            # Stop on: an empty page, the reported last page, or having
            # fetched at least as many raw items as the reported total.
            if not raw_items:
                break
            if page_amount is not None and page_num >= page_amount:
                break
            if reported_total is not None and fetched_raw_count >= reported_total:
                break
            page_num += 1
        # Only true when the while-condition (not a break) ended the loop,
        # i.e. the safety cap was exhausted.
        truncated = page_num > max_pages
        if truncated:
            warnings.append(
                {
                    "code": "TRANSFER_CANDIDATES_TRUNCATED",
                    "message": "transfer candidates reached the MCP safety page cap; returned candidates may be incomplete.",
                    "max_pages": max_pages,
                    "page_size": page_size,
                }
            )
        pagination: JSONObject = {
            "loaded": True,
            "page_size": page_size,
            "fetched_pages": fetched_pages,
            "reported_total": reported_total,
            "page_amount": page_amount,
            "truncated": truncated,
        }
        return items, warnings, pagination
2183
+ def _transfer_member_dedupe_key(self, item: dict[str, Any]) -> str:
2184
+ uid = item.get("uid") or item.get("userId") or item.get("memberId") or item.get("id")
2185
+ if uid not in (None, ""):
2186
+ return f"uid:{uid}"
2187
+ return json.dumps(item, ensure_ascii=False, sort_keys=True, default=str)
2188
+
1887
2189
  def _find_associated_report(self, task_context: dict[str, Any], report_id: int) -> dict[str, Any] | None:
1888
2190
  associated_reports = ((task_context.get("associated_reports") or {}).get("items") or [])
1889
2191
  for item in associated_reports: