@josephyan/qingflow-app-user-mcp 0.2.0-beta.50 → 0.2.0-beta.52
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -2
- package/npm/lib/runtime.mjs +22 -1
- package/package.json +1 -1
- package/pyproject.toml +1 -1
- package/src/qingflow_mcp/tools/import_tools.py +175 -23
package/README.md
CHANGED
@@ -3,13 +3,13 @@
 Install:
 
 ```bash
-npm install @josephyan/qingflow-app-user-mcp@0.2.0-beta.50
+npm install @josephyan/qingflow-app-user-mcp@0.2.0-beta.52
 ```
 
 Run:
 
 ```bash
-npx -y -p @josephyan/qingflow-app-user-mcp@0.2.0-beta.50 qingflow-app-user-mcp
+npx -y -p @josephyan/qingflow-app-user-mcp@0.2.0-beta.52 qingflow-app-user-mcp
 ```
 
 Environment:

package/npm/lib/runtime.mjs
CHANGED
@@ -82,6 +82,16 @@ export function getVenvServerCommand(packageRoot, commandName = "qingflow-mcp")
     : path.join(getVenvDir(packageRoot), "bin", commandName);
 }
 
+function readPackageVersion(packageRoot) {
+  const packageJsonPath = path.join(packageRoot, "package.json");
+  try {
+    const payload = JSON.parse(fs.readFileSync(packageJsonPath, "utf8"));
+    return typeof payload.version === "string" && payload.version.trim() ? payload.version.trim() : null;
+  } catch {
+    return null;
+  }
+}
+
 function getVenvPip(packageRoot) {
   return WINDOWS
     ? path.join(getVenvDir(packageRoot), "Scripts", "pip.exe")
@@ -123,8 +133,18 @@ export function ensurePythonEnv(packageRoot, { force = false, commandName = "qin
   const venvPython = getVenvPython(packageRoot);
   const serverCommand = getVenvServerCommand(packageRoot, commandName);
   const stampPath = path.join(venvDir, ".bootstrap.json");
+  const packageVersion = readPackageVersion(packageRoot);
+  let stampVersion = null;
+  if (fs.existsSync(stampPath)) {
+    try {
+      const payload = JSON.parse(fs.readFileSync(stampPath, "utf8"));
+      stampVersion = typeof payload.package_version === "string" && payload.package_version.trim() ? payload.package_version.trim() : null;
+    } catch {
+      stampVersion = null;
+    }
+  }
 
-  if (!force && fs.existsSync(serverCommand) && fs.existsSync(stampPath)) {
+  if (!force && fs.existsSync(serverCommand) && fs.existsSync(stampPath) && stampVersion && stampVersion === packageVersion) {
     return serverCommand;
   }
 
@@ -145,6 +165,7 @@ export function ensurePythonEnv(packageRoot, { force = false, commandName = "qin
     {
       installed_at: new Date().toISOString(),
      installer: "npm",
+      package_version: packageVersion,
     },
     null,
     2

package/package.json
CHANGED
package/pyproject.toml
CHANGED
package/src/qingflow_mcp/tools/import_tools.py
CHANGED

@@ -267,12 +267,13 @@ class ImportTools(ToolBase):
            app_key,
            import_capability=import_capability,
        )
-
+        template_header_profile, header_warnings = self._load_template_header_profile(
            context,
            app_key,
            import_capability=import_capability,
            expected_columns=expected_columns,
        )
+        template_header_titles = template_header_profile.get("allowed_titles")
        local_check = self._local_verify(
            path=path,
            app_key=app_key,
@@ -280,9 +281,28 @@ class ImportTools(ToolBase):
            allowed_header_titles=template_header_titles,
            schema_fingerprint=schema_fingerprint,
        )
-
-
-
+        effective_path = path
+        effective_local_check = local_check
+        auto_normalization = self._maybe_auto_normalize_file(
+            source_path=path,
+            expected_columns=expected_columns,
+            template_header_profile=template_header_profile,
+            local_check=local_check,
+        )
+        if auto_normalization is not None:
+            effective_path = Path(str(auto_normalization["verified_file_path"]))
+            effective_local_check = self._local_verify(
+                path=effective_path,
+                app_key=app_key,
+                expected_columns=expected_columns,
+                allowed_header_titles=list(auto_normalization["header_titles"]),
+                schema_fingerprint=schema_fingerprint,
+            )
+        warnings = import_warnings + deepcopy(effective_local_check["warnings"]) + header_warnings
+        if auto_normalization is not None:
+            warnings.extend(deepcopy(auto_normalization["warnings"]))
+        issues = deepcopy(effective_local_check["issues"])
+        can_import = bool(effective_local_check["can_import"])
        backend_verification = None
        if can_import:
            try:
@@ -292,9 +312,9 @@ class ImportTools(ToolBase):
                    f"/app/{app_key}/upload/verification",
                    files={
                        "file": (
-
-
-                            mimetypes.guess_type(
+                            effective_path.name,
+                            effective_path.read_bytes(),
+                            mimetypes.guess_type(effective_path.name)[0] or "application/octet-stream",
                        )
                    },
                )
@@ -334,8 +354,11 @@ class ImportTools(ToolBase):
            "profile": profile,
            "app_key": app_key,
            "file_path": str(path.resolve()),
+            "source_file_path": str(path.resolve()),
+            "verified_file_path": str(effective_path.resolve()) if effective_path != path else None,
            "file_name": path.name,
            "file_sha256": local_check["file_sha256"],
+            "verified_file_sha256": effective_local_check["file_sha256"] if effective_path != path else None,
            "file_size": local_check["file_size"],
            "schema_fingerprint": schema_fingerprint,
            "can_import": can_import,
@@ -344,7 +367,9 @@ class ImportTools(ToolBase):
            "import_capability": import_capability,
            "apply_rows": backend_verification.get("applyRows") if isinstance(backend_verification, dict) else None,
            "backend_verification": backend_verification,
-            "local_precheck":
+            "local_precheck": effective_local_check,
+            "source_local_precheck": local_check,
+            "auto_normalization": auto_normalization,
        }
        self._verification_store.put(verification_id, verification_payload)
        return {
@@ -354,8 +379,10 @@ class ImportTools(ToolBase):
            "can_import": can_import,
            "verification_id": verification_id,
            "file_path": str(path.resolve()),
+            "verified_file_path": str(effective_path.resolve()) if effective_path != path else None,
            "file_name": path.name,
            "file_sha256": local_check["file_sha256"],
+            "verified_file_sha256": effective_local_check["file_sha256"] if effective_path != path else None,
            "file_size": local_check["file_size"],
            "schema_fingerprint": schema_fingerprint,
            "apply_rows": backend_verification.get("applyRows") if isinstance(backend_verification, dict) else None,
@@ -372,8 +399,10 @@ class ImportTools(ToolBase):
                and backend_verification.get("beingValidated", True) is not False,
                "schema_fingerprint": schema_fingerprint,
                "file_sha256": local_check["file_sha256"],
+                "verified_file_sha256": effective_local_check["file_sha256"] if effective_path != path else None,
                "file_format": local_check["extension"],
                "local_precheck_limited": bool(local_check["local_precheck_limited"]),
+                "auto_normalized": effective_path != path,
            },
        }

@@ -409,7 +438,7 @@ class ImportTools(ToolBase):
        stored = self._verification_store.get(verification_id)
        if stored is None:
            return self._failed_repair_result(error_code="IMPORT_VERIFICATION_STALE", message="verification_id is missing or expired")
-        source_path = Path(str(stored["file_path"]))
+        source_path = Path(str(stored.get("source_file_path") or stored["file_path"]))
        extension = source_path.suffix.lower()
        if extension not in REPAIRABLE_IMPORT_EXTENSIONS:
            return self._failed_repair_result(
@@ -510,11 +539,12 @@ class ImportTools(ToolBase):
            return self._failed_start_result(error_code="IMPORT_VERIFICATION_STALE", message="verification_id does not belong to the requested app")
        if not bool(stored.get("can_import")):
            return self._failed_start_result(error_code="IMPORT_VERIFICATION_FAILED", message="verification_id is not importable", extra={"accepted": False})
-        current_path = Path(str(stored["file_path"]))
+        current_path = Path(str(stored.get("verified_file_path") or stored["file_path"]))
        if not current_path.is_file():
            return self._failed_start_result(error_code="IMPORT_VERIFICATION_STALE", message="verified file no longer exists")
        current_sha256 = _sha256_file(current_path)
-
+        expected_sha256 = stored.get("verified_file_sha256") or stored.get("file_sha256")
+        if current_sha256 != expected_sha256:
            return self._failed_start_result(
                error_code="IMPORT_FILE_CHANGED_AFTER_VERIFY",
                message="the file changed after verification; run record_import_verify again",
@@ -542,7 +572,7 @@ class ImportTools(ToolBase):
                being_enter_auditing=bool(being_enter_auditing),
                view_key=view_key,
                excel_url=file_url,
-                excel_name=current_path.name,
+                excel_name=str(stored.get("file_name") or current_path.name),
            )
        except QingflowApiError as exc:
            error_code = "IMPORT_SOCKET_ACK_TIMEOUT" if exc.details and exc.details.get("error_code") == "IMPORT_SOCKET_ACK_TIMEOUT" else "IMPORT_VERIFICATION_FAILED"
@@ -558,7 +588,7 @@ class ImportTools(ToolBase):
            "app_key": app_key,
            "import_id": import_id,
            "process_id_str": process_id_str,
-            "source_file_name": current_path.name,
+            "source_file_name": str(stored.get("file_name") or current_path.name),
            "started_at": started_at,
            "file_url": file_url,
            "verification_id": verification_id,
@@ -571,7 +601,7 @@ class ImportTools(ToolBase):
            "accepted": True,
            "import_id": import_id,
            "process_id_str": process_id_str,
-
+            "source_file_name": str(stored.get("file_name") or current_path.name),
            "file_url": file_url,
            "warnings": warnings,
            "verification": {
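
Note on the stored payload changes above: the new keys are read back with `.get(...) or ...` fallbacks, so verification records written before this version (which lack `source_file_path`, `verified_file_path`, and `verified_file_sha256`) still resolve to the original file. A minimal sketch of that precedence; the payload values and the helper name are illustrative, not part of the package:

```python
# Sketch of the fallback reads; payloads and helper name are hypothetical.
def resolve_upload_target(stored: dict) -> tuple[str, str | None]:
    # Same precedence as record_import_start in this diff: prefer the normalized
    # (verified) copy and its hash when present, else the original file.
    path = stored.get("verified_file_path") or stored["file_path"]
    sha256 = stored.get("verified_file_sha256") or stored.get("file_sha256")
    return path, sha256

old_payload = {"file_path": "/tmp/import.xlsx", "file_sha256": "aaa"}
new_payload = {
    "file_path": "/tmp/import.xlsx",
    "verified_file_path": "/tmp/qingflow-import-verified-import-1a2b3c4d.xlsx",
    "file_sha256": "aaa",
    "verified_file_sha256": "bbb",
}

assert resolve_upload_target(old_payload) == ("/tmp/import.xlsx", "aaa")
assert resolve_upload_target(new_payload) == ("/tmp/qingflow-import-verified-import-1a2b3c4d.xlsx", "bbb")
```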
@@ -810,29 +840,35 @@ class ImportTools(ToolBase):
|
|
|
810
840
|
base_result["error_code"] = "IMPORT_VERIFICATION_FAILED"
|
|
811
841
|
return base_result
|
|
812
842
|
|
|
813
|
-
def
|
|
843
|
+
def _load_template_header_profile(
|
|
814
844
|
self,
|
|
815
845
|
context,
|
|
816
846
|
app_key: str,
|
|
817
847
|
*,
|
|
818
848
|
import_capability: JSONObject | None = None,
|
|
819
849
|
expected_columns: list[JSONObject] | None = None,
|
|
820
|
-
) -> tuple[
|
|
850
|
+
) -> tuple[dict[str, Any], list[JSONObject]]: # type: ignore[no-untyped-def]
|
|
821
851
|
warnings: list[JSONObject] = []
|
|
822
852
|
try:
|
|
823
853
|
payload = self.backend.request("GET", context, f"/app/{app_key}/apply/excelTemplate")
|
|
824
854
|
template_url = _pick_template_url(payload)
|
|
825
855
|
if not template_url:
|
|
826
|
-
return None, warnings
|
|
856
|
+
return {"allowed_titles": None, "leaf_titles": None, "header_depth": 1}, warnings
|
|
827
857
|
content = self.backend.download_binary(template_url)
|
|
828
|
-
workbook = load_workbook(BytesIO(content), read_only=
|
|
858
|
+
workbook = load_workbook(BytesIO(content), read_only=False, data_only=False)
|
|
829
859
|
if not workbook.sheetnames:
|
|
830
|
-
return None, warnings
|
|
860
|
+
return {"allowed_titles": None, "leaf_titles": None, "header_depth": 1}, warnings
|
|
831
861
|
sheet = workbook[workbook.sheetnames[0]]
|
|
832
862
|
header_row = [cell.value for cell in next(sheet.iter_rows(min_row=1, max_row=1), [])]
|
|
833
863
|
titles = [_normalize_optional_text(value) for value in header_row]
|
|
834
864
|
normalized_titles = [title for title in titles if title]
|
|
835
|
-
|
|
865
|
+
header_depth = _infer_header_depth(sheet)
|
|
866
|
+
leaf_titles = [title for title in _extract_leaf_header_titles(sheet, header_depth) if title]
|
|
867
|
+
return {
|
|
868
|
+
"allowed_titles": normalized_titles or None,
|
|
869
|
+
"leaf_titles": leaf_titles or None,
|
|
870
|
+
"header_depth": header_depth,
|
|
871
|
+
}, warnings
|
|
836
872
|
except Exception:
|
|
837
873
|
if (
|
|
838
874
|
_normalize_optional_text((import_capability or {}).get("auth_source")) == "apply_auth"
|
|
@@ -844,14 +880,78 @@ class ImportTools(ToolBase):
|
|
|
844
880
|
"message": "Official template headers require data management permission; local precheck fell back to applicant import columns.",
|
|
845
881
|
}
|
|
846
882
|
)
|
|
847
|
-
|
|
883
|
+
fallback_titles = [str(item["title"]) for item in expected_columns]
|
|
884
|
+
return {"allowed_titles": fallback_titles, "leaf_titles": fallback_titles, "header_depth": 1}, warnings
|
|
848
885
|
warnings.append(
|
|
849
886
|
{
|
|
850
887
|
"code": "IMPORT_TEMPLATE_HEADER_UNAVAILABLE",
|
|
851
888
|
"message": "Official template headers could not be loaded during local precheck; falling back to applicant writable columns only.",
|
|
852
889
|
}
|
|
853
890
|
)
|
|
854
|
-
return None, warnings
|
|
891
|
+
return {"allowed_titles": None, "leaf_titles": None, "header_depth": 1}, warnings
|
|
892
|
+
|
|
893
|
+
def _maybe_auto_normalize_file(
|
|
894
|
+
self,
|
|
895
|
+
*,
|
|
896
|
+
source_path: Path,
|
|
897
|
+
expected_columns: list[JSONObject],
|
|
898
|
+
template_header_profile: dict[str, Any],
|
|
899
|
+
local_check: dict[str, Any],
|
|
900
|
+
) -> dict[str, Any] | None:
|
|
901
|
+
if source_path.suffix.lower() != ".xlsx":
|
|
902
|
+
return None
|
|
903
|
+
workbook = load_workbook(source_path, read_only=False, data_only=False)
|
|
904
|
+
if not workbook.sheetnames:
|
|
905
|
+
return None
|
|
906
|
+
sheet = workbook[workbook.sheetnames[0]]
|
|
907
|
+
header_depth = _infer_header_depth(sheet)
|
|
908
|
+
trailing_blank_rows = _count_trailing_blank_rows(sheet)
|
|
909
|
+
if header_depth <= 1 and trailing_blank_rows <= 0:
|
|
910
|
+
return None
|
|
911
|
+
extracted_headers = _extract_leaf_header_titles(sheet, header_depth)
|
|
912
|
+
target_headers = _overlay_header_titles(
|
|
913
|
+
extracted_headers,
|
|
914
|
+
template_header_profile.get("leaf_titles"),
|
|
915
|
+
)
|
|
916
|
+
verified_path = _resolve_verified_output_path(source_path)
|
|
917
|
+
normalized_workbook = Workbook()
|
|
918
|
+
normalized_sheet = normalized_workbook.active
|
|
919
|
+
normalized_sheet.title = sheet.title
|
|
920
|
+
normalized_sheet.append(target_headers)
|
|
921
|
+
last_nonblank_row = max(header_depth, sheet.max_row - trailing_blank_rows)
|
|
922
|
+
for row_index in range(header_depth + 1, last_nonblank_row + 1):
|
|
923
|
+
normalized_sheet.append(
|
|
924
|
+
[sheet.cell(row=row_index, column=column_index).value for column_index in range(1, sheet.max_column + 1)]
|
|
925
|
+
)
|
|
926
|
+
verified_path.parent.mkdir(parents=True, exist_ok=True)
|
|
927
|
+
normalized_workbook.save(verified_path)
|
|
928
|
+
warnings: list[JSONObject] = []
|
|
929
|
+
applied_repairs: list[str] = []
|
|
930
|
+
if header_depth > 1:
|
|
931
|
+
applied_repairs.append("normalize_headers")
|
|
932
|
+
warnings.append(
|
|
933
|
+
{
|
|
934
|
+
"code": "IMPORT_HEADERS_AUTO_NORMALIZED",
|
|
935
|
+
"message": f"Workbook used {header_depth} header rows; record_import_verify normalized it to a single leaf-header row automatically.",
|
|
936
|
+
}
|
|
937
|
+
)
|
|
938
|
+
if trailing_blank_rows > 0:
|
|
939
|
+
applied_repairs.append("trim_trailing_blank_rows")
|
|
940
|
+
warnings.append(
|
|
941
|
+
{
|
|
942
|
+
"code": "TRAILING_BLANK_ROWS_AUTO_TRIMMED",
|
|
943
|
+
"message": f"Removed {trailing_blank_rows} trailing blank rows before backend verification.",
|
|
944
|
+
}
|
|
945
|
+
)
|
|
946
|
+
return {
|
|
947
|
+
"verified_file_path": str(verified_path.resolve()),
|
|
948
|
+
"header_titles": target_headers,
|
|
949
|
+
"warnings": warnings,
|
|
950
|
+
"applied_repairs": applied_repairs,
|
|
951
|
+
"header_depth": header_depth,
|
|
952
|
+
"trailing_blank_rows": trailing_blank_rows,
|
|
953
|
+
"source_local_check": local_check,
|
|
954
|
+
}
|
|
855
955
|
|
|
856
956
|
def _fetch_import_capability(self, context, app_key: str) -> tuple[JSONObject, list[JSONObject]]: # type: ignore[no-untyped-def]
|
|
857
957
|
try:
|
|
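
For context on `_maybe_auto_normalize_file` above: it rewrites a multi-row-header .xlsx into a one-row, leaf-header copy and drops trailing blank rows before backend verification. A standalone sketch of that transformation with openpyxl; the sheet contents and the hard-coded depth/blank-row counts are illustrative assumptions, not the package's code:

```python
from openpyxl import Workbook

# Build a workbook shaped like the case the diff targets:
# a merged group-title row, a leaf-title row, one data row, one trailing blank row.
source = Workbook()
src = source.active
src.append(["Order", None, "Customer"])          # row 1: group titles (A1:B1 merged below)
src.append(["Order ID", "Order Date", "Name"])   # row 2: leaf titles
src.merge_cells("A1:B1")
src.append(["A-1", "2024-01-01", "Alice"])       # row 3: data
src.append([None, None, None])                   # row 4: trailing blank row

header_depth = 2         # what _infer_header_depth would report for this sheet
trailing_blank_rows = 1  # what _count_trailing_blank_rows would report

# Write the normalized copy: one leaf-header row, then only the non-blank data rows.
normalized = Workbook()
dst = normalized.active
dst.append(["Order ID", "Order Date", "Name"])
last_nonblank_row = src.max_row - trailing_blank_rows
for row in range(header_depth + 1, last_nonblank_row + 1):
    dst.append([src.cell(row=row, column=col).value for col in range(1, src.max_column + 1)])

print([cell.value for cell in dst[1]])  # ['Order ID', 'Order Date', 'Name']
print(dst.max_row)                      # 2 -> header row plus one data row
```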
@@ -919,8 +1019,10 @@ class ImportTools(ToolBase):
            "can_import": False,
            "verification_id": None,
            "file_path": str(Path(file_path).expanduser()) if file_path else file_path,
+            "verified_file_path": None,
            "file_name": Path(file_path).name if file_path else None,
            "file_sha256": None,
+            "verified_file_sha256": None,
            "file_size": None,
            "schema_fingerprint": None,
            "apply_rows": None,
@@ -1028,7 +1130,7 @@ class ImportTools(ToolBase):
        response = {
            "ok": False,
            "status": "failed",
-            "error_code": payload.get("details", {}).get("error_code") or error_code,
+            "error_code": ((payload.get("details") or {}) if isinstance(payload.get("details"), dict) else {}).get("error_code") or error_code,
            "warnings": [],
            "verification": {},
            "message": payload.get("message") or str(error),
@@ -1067,6 +1169,10 @@ def _resolve_repaired_output_path(source_path: Path, *, output_path: str | None)
    return source_path.with_name(f"{source_path.stem}.repaired{source_path.suffix}")


+def _resolve_verified_output_path(source_path: Path) -> Path:
+    return Path(tempfile.gettempdir()) / f"qingflow-import-verified-{source_path.stem}-{uuid4().hex[:8]}{source_path.suffix}"
+
+
def _utc_now() -> datetime:
    return datetime.now(timezone.utc)

@@ -1173,6 +1279,52 @@ def _analyze_headers(
    return {"issues": issues, "repair_suggestions": repair_suggestions}


+def _infer_header_depth(sheet) -> int:  # type: ignore[no-untyped-def]
+    header_depth = 1
+    merged_cells = getattr(sheet, "merged_cells", None)
+    merged_ranges = getattr(merged_cells, "ranges", merged_cells) if merged_cells is not None else []
+    row_one_has_merge = False
+    for merged_range in merged_ranges or []:
+        min_row = int(getattr(merged_range, "min_row", 1))
+        max_row = int(getattr(merged_range, "max_row", 1))
+        if min_row == 1:
+            row_one_has_merge = True
+            header_depth = max(header_depth, max_row)
+    if row_one_has_merge and sheet.max_row >= 2:
+        row_two_values = [cell.value for cell in sheet[2]]
+        if any(_normalize_optional_text(value) for value in row_two_values):
+            header_depth = max(header_depth, 2)
+    return min(header_depth, max(1, int(sheet.max_row)))
+
+
+def _extract_leaf_header_titles(sheet, header_depth: int) -> list[str]:  # type: ignore[no-untyped-def]
+    titles: list[str] = []
+    max_column = max(1, int(sheet.max_column))
+    depth = max(1, min(header_depth, int(sheet.max_row)))
+    for column_index in range(1, max_column + 1):
+        selected = ""
+        for row_index in range(depth, 0, -1):
+            text = _normalize_optional_text(sheet.cell(row=row_index, column=column_index).value)
+            if text:
+                selected = text
+                break
+        titles.append(selected)
+    return titles
+
+
+def _overlay_header_titles(actual_titles: list[str], template_leaf_titles: Any) -> list[str]:
+    normalized = list(actual_titles)
+    if not isinstance(template_leaf_titles, list):
+        return normalized
+    for index, title in enumerate(template_leaf_titles):
+        normalized_title = _normalize_optional_text(title)
+        if normalized_title is None:
+            continue
+        if index < len(normalized):
+            normalized[index] = normalized_title
+    return normalized
+
+
def _count_trailing_blank_rows(sheet) -> int:  # type: ignore[no-untyped-def]
    count = 0
    for row_index in range(sheet.max_row, 1, -1):