pysfi 0.1.7__py3-none-any.whl → 0.1.11__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
- {pysfi-0.1.7.dist-info → pysfi-0.1.11.dist-info}/METADATA +11 -9
- pysfi-0.1.11.dist-info/RECORD +60 -0
- pysfi-0.1.11.dist-info/entry_points.txt +28 -0
- sfi/__init__.py +1 -1
- sfi/alarmclock/alarmclock.py +40 -40
- sfi/bumpversion/__init__.py +1 -1
- sfi/cleanbuild/cleanbuild.py +155 -0
- sfi/condasetup/condasetup.py +116 -0
- sfi/docscan/__init__.py +1 -1
- sfi/docscan/docscan.py +407 -103
- sfi/docscan/docscan_gui.py +1282 -596
- sfi/docscan/lang/eng.py +152 -0
- sfi/docscan/lang/zhcn.py +170 -0
- sfi/filedate/filedate.py +185 -112
- sfi/gittool/__init__.py +2 -0
- sfi/gittool/gittool.py +401 -0
- sfi/llmclient/llmclient.py +592 -0
- sfi/llmquantize/llmquantize.py +480 -0
- sfi/llmserver/llmserver.py +335 -0
- sfi/makepython/makepython.py +31 -30
- sfi/pdfsplit/pdfsplit.py +173 -173
- sfi/pyarchive/pyarchive.py +418 -0
- sfi/pyembedinstall/pyembedinstall.py +629 -0
- sfi/pylibpack/__init__.py +0 -0
- sfi/pylibpack/pylibpack.py +1457 -0
- sfi/pylibpack/rules/numpy.json +22 -0
- sfi/pylibpack/rules/pymupdf.json +10 -0
- sfi/pylibpack/rules/pyqt5.json +19 -0
- sfi/pylibpack/rules/pyside2.json +23 -0
- sfi/pylibpack/rules/scipy.json +23 -0
- sfi/pylibpack/rules/shiboken2.json +24 -0
- sfi/pyloadergen/pyloadergen.py +512 -227
- sfi/pypack/__init__.py +0 -0
- sfi/pypack/pypack.py +1142 -0
- sfi/pyprojectparse/__init__.py +0 -0
- sfi/pyprojectparse/pyprojectparse.py +500 -0
- sfi/pysourcepack/pysourcepack.py +308 -0
- sfi/quizbase/__init__.py +0 -0
- sfi/quizbase/quizbase.py +828 -0
- sfi/quizbase/quizbase_gui.py +987 -0
- sfi/regexvalidate/__init__.py +0 -0
- sfi/regexvalidate/regex_help.html +284 -0
- sfi/regexvalidate/regexvalidate.py +468 -0
- sfi/taskkill/taskkill.py +0 -2
- sfi/workflowengine/__init__.py +0 -0
- sfi/workflowengine/workflowengine.py +444 -0
- pysfi-0.1.7.dist-info/RECORD +0 -31
- pysfi-0.1.7.dist-info/entry_points.txt +0 -15
- sfi/embedinstall/embedinstall.py +0 -418
- sfi/projectparse/projectparse.py +0 -152
- sfi/pypacker/fspacker.py +0 -91
- {pysfi-0.1.7.dist-info → pysfi-0.1.11.dist-info}/WHEEL +0 -0
- /sfi/{embedinstall → docscan/lang}/__init__.py +0 -0
- /sfi/{projectparse → llmquantize}/__init__.py +0 -0
- /sfi/{pypacker → pyembedinstall}/__init__.py +0 -0
@@ -0,0 +1,418 @@
+from __future__ import annotations
+
+import argparse
+import json
+import logging
+import shutil
+import subprocess
+from pathlib import Path
+from typing import Any
+
+from sfi.pyprojectparse.pyprojectparse import Project, Solution
+
+logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
+logger = logging.getLogger(__name__)
+
+ARCHIVE_FORMATS: frozenset[str] = frozenset(["zip", "7z", "nsis"])
+
+DEFAULT_IGNORE_PATTERNS: frozenset[str] = frozenset([
+    "__pycache__",
+    "*.pyc",
+    "*.pyo",
+    ".git",
+    ".gitignore",
+    ".pytest_cache",
+    ".coverage",
+    "*.egg-info",
+    "dist",
+    "build",
+    "*.log",
+    ".DS_Store",
+    "Thumbs.db",
+    ".ruff_cache",
+    ".benchmarks",
+])
+
+
+def should_ignore(file_path: Path, ignore_patterns: set[str]) -> bool:
+    """Check if a file should be ignored based on patterns."""
+    from fnmatch import fnmatch
+
+    # Convert to string for pattern matching
+    file_str = str(file_path)
+
+    for pattern in ignore_patterns:
+        if pattern.startswith("*."):
+            # Check just the filename
+            if fnmatch(file_path.name, pattern):
+                return True
+        else:
+            # Check full path
+            if pattern in file_str or fnmatch(file_str, pattern):
+                return True
+
+    return False
+
+
+def load_projects(config_file: Path) -> dict[str, Any]:
+    """Load projects configuration from JSON file."""
+    if not config_file.exists():
+        logger.error(f"Configuration file not found: {config_file}")
+        return {}
+
+    with open(config_file, encoding="utf-8") as f:
+        return json.load(f)
+
+
+def get_project_directory(project_name: str, base_dir: Path) -> Path | None:
+    """Locate the project directory."""
+    project_path = base_dir / project_name
+    if project_path.exists() and project_path.is_dir():
+        return project_path
+
+    return None
+
+
+def check_command_available(command: str) -> bool:
+    """Check if a command is available in the system PATH."""
+    try:
+        subprocess.run(
+            ["where", command] if shutil.which("where") else ["which", command],
+            capture_output=True,
+            check=True,
+            shell=True,
+        )
+        return True
+    except (subprocess.CalledProcessError, FileNotFoundError):
+        return False
+
+
+def archive_zip(
+    dist_dir: Path,
+    output_file: Path,
+    ignore_patterns: set[str],
+) -> bool:
+    """Create ZIP archive using Python's zipfile module."""
+    try:
+        import zipfile
+
+        logger.info(f"Creating ZIP archive: {output_file}, using files: {dist_dir}")
+        with zipfile.ZipFile(output_file, "w", zipfile.ZIP_DEFLATED) as zipf:
+            for file_path in dist_dir.rglob("*"):
+                if file_path.is_file() and not should_ignore(
+                    file_path, ignore_patterns
+                ):
+                    arcname = file_path.relative_to(dist_dir)
+                    zipf.write(file_path, arcname)
+                    logger.debug(f"Added: {arcname}")
+
+        logger.info(f"ZIP archive created successfully: {output_file}")
+        return True
+    except Exception as e:
+        logger.error(f"Failed to create ZIP archive: {e}")
+        return False
+
+
+def archive_7z(
+    dist_dir: Path,
+    output_file: Path,
+    ignore_patterns: set[str],
+) -> bool:
+    """Create 7z archive using 7z command."""
+    if not check_command_available("7z"):
+        logger.error("7z command not found. Please install 7-Zip.")
+        return False
+
+    try:
+        logger.info(f"Creating 7z archive: {output_file}")
+        output_file.parent.mkdir(parents=True, exist_ok=True)
+
+        # Create temp directory with filtered files
+        import tempfile
+
+        with tempfile.TemporaryDirectory() as temp_dir:
+            temp_path = Path(temp_dir) / dist_dir.name
+            shutil.copytree(
+                dist_dir, temp_path, ignore=shutil.ignore_patterns(*ignore_patterns)
+            )
+
+            cmd = ["7z", "a", "-t7z", str(output_file), str(temp_path / "*")]
+            subprocess.run(cmd, check=True, shell=True)
+
+        logger.info(f"7z archive created successfully: {output_file}")
+        return True
+    except subprocess.CalledProcessError as e:
+        logger.error(f"Failed to create 7z archive: {e}")
+        return False
+    except Exception as e:
+        logger.error(f"Unexpected error creating 7z archive: {e}")
+        return False
+
+
+def create_nsis_script(
+    project: Project,
+    dist_dir: Path,
+    output_file: Path,
+) -> Path | None:
+    """Generate NSIS script file."""
+    # Convert source_dir to use forward slashes for NSIS compatibility
+    source_dir_str = str(dist_dir).replace("\\", "/")
+
+    nsis_script = f"""
+!include "MUI2.nsh"
+
+Name "{project.name}"
+OutFile "{output_file.name}"
+InstallDir "$PROGRAMFILES\\{project.name}"
+InstallDirRegKey HKCU "Software\\{project.name}" ""
+RequestExecutionLevel admin
+VIProductVersion {project.version.replace(".", ",")}.0
+VIAddVersionKey "ProductName" "{project.name}"
+VIAddVersionKey "ProductVersion" "{project.version}"
+VIAddVersionKey "FileDescription" "{project.description}"
+VIAddVersionKey "FileVersion" "{project.version}"
+
+!define MUI_ABORTWARNING
+!insertmacro MUI_PAGE_WELCOME
+!insertmacro MUI_PAGE_LICENSE "LICENSE"
+!insertmacro MUI_PAGE_COMPONENTS
+!insertmacro MUI_PAGE_DIRECTORY
+!insertmacro MUI_PAGE_INSTFILES
+!insertmacro MUI_PAGE_FINISH
+
+!insertmacro MUI_UNPAGE_WELCOME
+!insertmacro MUI_UNPAGE_CONFIRM
+!insertmacro MUI_UNPAGE_INSTFILES
+!insertmacro MUI_UNPAGE_FINISH
+
+!insertmacro MUI_LANGUAGE "English"
+
+Section "{project.name} (required)" SecMain
+    SectionIn RO
+    SetOutPath "$INSTDIR"
+    File /r "{source_dir_str}\\*.*"
+SectionEnd
+
+Section "Start Menu Shortcuts"
+    CreateDirectory "$SMPROGRAMS\\{project.name}"
+    CreateShortcut "$SMPROGRAMS\\{project.name}\\{project.name}.lnk" "$INSTDIR\\main.py"
+SectionEnd
+
+Section "Uninstall"
+    Delete "$SMPROGRAMS\\{project.name}\\*.*"
+    RMDir "$SMPROGRAMS\\{project.name}"
+    RMDir /r "$INSTDIR"
+    DeleteRegKey /ifempty HKCU "Software\\{project.name}"
+SectionEnd
+"""
+
+    script_file = output_file.parent / f"{project.name}_installer.nsi"
+    try:
+        with open(script_file, "w", encoding="utf-8") as f:
+            f.write(nsis_script)
+        logger.info(f"NSIS script generated: {script_file}")
+        return script_file
+    except Exception as e:
+        logger.error(f"Failed to generate NSIS script: {e}")
+        return None
+
+
+def archive_nsis(
+    project: Project,
+    dist_dir: Path,
+    output_file: Path,
+    ignore_patterns: set[str],
+) -> bool:
+    """Create NSIS installer using makensis command."""
+    if not check_command_available("makensis"):
+        logger.error("makensis command not found. Please install NSIS.")
+        return False
+
+    try:
+        logger.info(f"Creating NSIS installer: {output_file}")
+        output_file.parent.mkdir(parents=True, exist_ok=True)
+
+        # Create temp directory with filtered files
+        import tempfile
+
+        with tempfile.TemporaryDirectory() as temp_dir:
+            temp_path = Path(temp_dir) / dist_dir.name
+            shutil.copytree(
+                dist_dir, temp_path, ignore=shutil.ignore_patterns(*ignore_patterns)
+            )
+
+            script_file = create_nsis_script(
+                project=project,
+                dist_dir=temp_path,
+                output_file=output_file,
+            )
+            if not script_file or not script_file.exists():
+                return False
+
+            cmd = ["makensis", str(script_file)]
+            subprocess.run(cmd, check=True, shell=True)
+
+        logger.info(f"NSIS installer created successfully: {output_file}")
+        return True
+    except subprocess.CalledProcessError as e:
+        logger.error(f"Failed to create NSIS installer: {e}")
+        return False
+    except Exception as e:
+        logger.error(f"Unexpected error creating NSIS installer: {e}")
+        return False
+
+
+def archive_project(
+    project: Project,
+    directory: Path,
+    format: str,
+    ignore_patterns: set[str],
+) -> bool:
+    """Archive a single project."""
+    logger.info(f"Processing project: {project.name}")
+
+    dist_dir = directory / "dist"
+    if not dist_dir:
+        logger.warning(
+            f"Project dist directory not found: {project.name}, please build project first"
+        )
+        return False
+
+    output_dir = directory / "build"
+    output_dir.mkdir(parents=True, exist_ok=True)
+    if format == "nsis":
+        output_file = output_dir / f"{project.name}-{project.version}-setup.exe"
+        return archive_nsis(
+            project=project,
+            dist_dir=dist_dir,
+            output_file=output_file,
+            ignore_patterns=ignore_patterns,
+        )
+    else:
+        extension = "zip" if format == "zip" else "7z"
+        output_file = output_dir / f"{project.name}-{project.version}.{extension}"
+
+        if format == "zip":
+            return archive_zip(
+                dist_dir=dist_dir,
+                output_file=output_file,
+                ignore_patterns=ignore_patterns,
+            )
+        elif format == "7z":
+            return archive_7z(
+                dist_dir=dist_dir,
+                output_file=output_file,
+                ignore_patterns=ignore_patterns,
+            )
+
+    logger.error(f"Unsupported format: {format}")
+    return False
+
+
+def archive_projects(
+    directory: Path,
+    format: str,
+    projects_to_archive: list | None = None,
+    ignore_patterns: set | None = None,
+) -> None:
+    """Archive all projects in the directory."""
+    if format not in ARCHIVE_FORMATS:
+        logger.error(
+            f"Unsupported format: {format}. Supported formats: {', '.join(ARCHIVE_FORMATS)}"
+        )
+        return
+    else:
+        logger.debug(f"Archiving projects in {directory} to `{format}` format")
+
+    ignore_patterns = (
+        ignore_patterns | DEFAULT_IGNORE_PATTERNS
+        if ignore_patterns
+        else set(DEFAULT_IGNORE_PATTERNS)
+    )
+    logger.debug(f"Ignoring patterns: {', '.join(ignore_patterns)}")
+
+    project_config = Solution.from_directory(root_dir=directory)
+    projects = project_config.projects
+
+    if not projects:
+        logger.error("No projects found in configuration")
+        return
+    else:
+        logger.debug(f"Found {len(projects)} projects")
+
+    projects_to_archive = projects_to_archive or list(projects.keys())
+    if not projects_to_archive:
+        logger.error("No projects to archive")
+        return
+    else:
+        logger.debug(f"Archiving projects: {', '.join(projects_to_archive)}")
+
+    success_count = 0
+    total_count = 0
+    for project_name in projects_to_archive:
+        if project_name not in projects:
+            logger.warning(f"Project not found: {project_name}")
+            continue
+        project = projects[project_name]
+        total_count += 1
+        if archive_project(
+            project=project,
+            directory=directory,
+            format=format,
+            ignore_patterns=ignore_patterns,
+        ):
+            success_count += 1
+
+    if success_count:
+        logger.info(
+            f"Archiving complete: {success_count}/{total_count} projects successfully archived"
+        )
+    else:
+        logger.error("Archiving failed")
+
+
+def create_parser() -> argparse.ArgumentParser:
+    """Create parser for command line arguments."""
+    parser = argparse.ArgumentParser(description="Archive projects in directory")
+    parser.add_argument(
+        "directory",
+        type=Path,
+        nargs="?",
+        default=Path.cwd(),
+        help="Directory to archive for projects.",
+    )
+    parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
+    parser.add_argument(
+        "--format",
+        "-f",
+        type=str,
+        default="zip",
+        choices=ARCHIVE_FORMATS,
+        help=f"Archive format ({', '.join(ARCHIVE_FORMATS)})",
+    )
+    parser.add_argument(
+        "--project",
+        "-p",
+        type=str,
+        nargs="*",
+        help="Specific project(s) to archive (default: all projects)",
+    )
+    parser.add_argument(
+        "--ignore", type=str, nargs="*", help="Additional ignore patterns"
+    )
+    return parser
+
+
+def main():
+    parser = create_parser()
+    args = parser.parse_args()
+
+    if args.debug:
+        logger.setLevel(logging.DEBUG)
+
+    archive_projects(
+        directory=args.directory,
+        format=args.format,
+        projects_to_archive=args.project,
+        ignore_patterns=set(args.ignore or []),
+    )