claude-mpm 4.2.13__py3-none-any.whl → 4.2.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34) hide show
  1. claude_mpm/core/constants.py +65 -0
  2. claude_mpm/core/error_handler.py +625 -0
  3. claude_mpm/core/file_utils.py +770 -0
  4. claude_mpm/core/logging_utils.py +502 -0
  5. claude_mpm/dashboard/static/dist/components/code-tree.js +1 -1
  6. claude_mpm/dashboard/static/dist/components/file-viewer.js +1 -1
  7. claude_mpm/dashboard/static/dist/dashboard.js +1 -1
  8. claude_mpm/dashboard/static/dist/socket-client.js +1 -1
  9. claude_mpm/dashboard/static/js/components/code-simple.js +44 -1
  10. claude_mpm/dashboard/static/js/components/code-tree/tree-breadcrumb.js +353 -0
  11. claude_mpm/dashboard/static/js/components/code-tree/tree-constants.js +235 -0
  12. claude_mpm/dashboard/static/js/components/code-tree/tree-search.js +409 -0
  13. claude_mpm/dashboard/static/js/components/code-tree/tree-utils.js +435 -0
  14. claude_mpm/dashboard/static/js/components/code-tree.js +29 -5
  15. claude_mpm/dashboard/static/js/components/file-viewer.js +69 -27
  16. claude_mpm/dashboard/static/js/components/session-manager.js +1 -1
  17. claude_mpm/dashboard/static/js/components/working-directory.js +18 -9
  18. claude_mpm/dashboard/static/js/dashboard.js +55 -9
  19. claude_mpm/dashboard/static/js/shared/dom-helpers.js +396 -0
  20. claude_mpm/dashboard/static/js/shared/event-bus.js +330 -0
  21. claude_mpm/dashboard/static/js/shared/logger.js +385 -0
  22. claude_mpm/dashboard/static/js/shared/tooltip-service.js +253 -0
  23. claude_mpm/dashboard/static/js/socket-client.js +18 -0
  24. claude_mpm/dashboard/templates/index.html +21 -8
  25. claude_mpm/services/monitor/handlers/__init__.py +2 -1
  26. claude_mpm/services/monitor/handlers/file.py +263 -0
  27. claude_mpm/services/monitor/server.py +81 -1
  28. claude_mpm/services/socketio/handlers/file.py +40 -5
  29. {claude_mpm-4.2.13.dist-info → claude_mpm-4.2.14.dist-info}/METADATA +1 -1
  30. {claude_mpm-4.2.13.dist-info → claude_mpm-4.2.14.dist-info}/RECORD +34 -22
  31. {claude_mpm-4.2.13.dist-info → claude_mpm-4.2.14.dist-info}/WHEEL +0 -0
  32. {claude_mpm-4.2.13.dist-info → claude_mpm-4.2.14.dist-info}/entry_points.txt +0 -0
  33. {claude_mpm-4.2.13.dist-info → claude_mpm-4.2.14.dist-info}/licenses/LICENSE +0 -0
  34. {claude_mpm-4.2.13.dist-info → claude_mpm-4.2.14.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,770 @@
1
+ """Centralized file operation utilities for Claude MPM.
2
+
3
+ This module consolidates 150+ repeated file I/O patterns across the codebase,
4
+ providing safe, consistent, and error-handled file operations.
5
+ """
6
+
7
+ import errno
8
+ import fcntl
9
+ import hashlib
10
+ import json
11
+ import os
12
+ import shutil
13
+ import tempfile
14
+ from contextlib import contextmanager
15
+ from pathlib import Path
16
+ from typing import Any, List, Optional, Union
17
+
18
+ import yaml
19
+
20
+ from claude_mpm.core.logging_utils import get_logger
21
+
22
+ logger = get_logger(__name__)
23
+
24
+
25
+ # ==============================================================================
26
+ # PATH UTILITIES
27
+ # ==============================================================================
28
+
29
+
30
def ensure_directory(path: Union[str, Path], mode: int = 0o755) -> Path:
    """Create a directory (and any missing parents) if it does not exist.

    Replaces the common pattern ``os.makedirs(path, exist_ok=True)``.

    Args:
        path: Directory path.
        mode: Permissions for the created directory (applied to the final
            directory only; intermediate parents use defaults, and the
            effective bits are subject to the process umask).

    Returns:
        Path object for the directory.
    """
    target = Path(path)
    target.mkdir(parents=True, exist_ok=True, mode=mode)
    return target
46
+
47
+
48
def ensure_parent_directory(filepath: Union[str, Path]) -> Path:
    """Make sure the directory containing *filepath* exists.

    Args:
        filepath: File path whose parent directory should exist.

    Returns:
        Path object for the (possibly just created) parent directory.
    """
    return ensure_directory(Path(filepath).parent)
59
+
60
+
61
def safe_path_join(*parts: Union[str, Path]) -> Path:
    """Safely join path components, preventing path traversal.

    The first component is treated as the trusted base directory; the
    joined path must resolve to a location under it.

    Args:
        *parts: Path components to join (first component is the base).

    Returns:
        Resolved joined path.

    Raises:
        ValueError: If no components are given, or if the joined path
            escapes the base directory (path traversal).
    """
    # Previously an empty call raised a bare IndexError on parts[0];
    # fail early with a clear, documented exception instead.
    if not parts:
        raise ValueError("safe_path_join requires at least one path component")

    # Join the parts
    path = Path(*parts)

    # Resolve to absolute path and check for traversal
    resolved = path.resolve()
    base = Path(parts[0]).resolve()

    # Ensure the resolved path is under the base path
    try:
        resolved.relative_to(base)
    except ValueError as exc:
        # Chain the original error for easier debugging (PEP 3134).
        raise ValueError(f"Path traversal detected: {path}") from exc

    return resolved
87
+
88
+
89
def is_safe_path(path: Union[str, Path], base_dir: Union[str, Path]) -> bool:
    """Report whether *path* stays inside *base_dir* after resolution.

    Args:
        path: Path to check.
        base_dir: Base directory that path should be under.

    Returns:
        True if the resolved path is under the resolved base, else False.
    """
    try:
        Path(path).resolve().relative_to(Path(base_dir).resolve())
    except ValueError:
        return False
    return True
106
+
107
+
108
def get_relative_path(
    path: Union[str, Path], base: Optional[Union[str, Path]] = None
) -> Path:
    """Express *path* relative to *base* when possible.

    Args:
        path: Path to make relative.
        base: Base directory (defaults to the current working directory).

    Returns:
        The relative path, or the absolute path when *path* does not lie
        under *base* (the comparison is purely lexical, no resolution).
    """
    candidate = Path(path)
    anchor = Path(base) if base else Path.cwd()

    try:
        return candidate.relative_to(anchor)
    except ValueError:
        # Not under the base directory: fall back to an absolute path.
        return candidate.absolute()
128
+
129
+
130
+ # ==============================================================================
131
+ # FILE READING OPERATIONS
132
+ # ==============================================================================
133
+
134
+
135
def safe_read(
    filepath: Union[str, Path],
    mode: str = "r",
    encoding: str = "utf-8",
    default: Any = None,
    max_size: Optional[int] = None,
) -> Union[str, bytes, Any]:
    """Read a whole file, returning *default* on any failure.

    Replaces the common pattern::

        with open(file, 'r') as f:
            content = f.read()

    Args:
        filepath: Path to file.
        mode: Read mode ('r' for text, 'rb' for binary).
        encoding: Text encoding (ignored for binary mode).
        default: Value returned when the file is missing, oversized,
            unreadable, or cannot be decoded.
        max_size: Maximum file size to read, in bytes.

    Returns:
        File contents, or *default*.
    """
    filepath = Path(filepath)

    if not filepath.exists():
        logger.debug(f"File not found: {filepath}")
        return default

    # Enforce the optional size ceiling before opening the file.
    if max_size is not None:
        file_size = filepath.stat().st_size
        if file_size > max_size:
            logger.warning(
                f"File {filepath} exceeds size limit ({file_size} > {max_size})"
            )
            return default

    binary = "b" in mode
    try:
        # encoding=None for binary mode matches open()'s requirements.
        with open(filepath, mode, encoding=None if binary else encoding) as handle:
            return handle.read()
    except (OSError, UnicodeDecodeError) as err:
        logger.error(f"Error reading file {filepath}: {err}")
        return default
184
+
185
+
186
def safe_read_lines(
    filepath: Union[str, Path],
    encoding: str = "utf-8",
    max_lines: Optional[int] = None,
    skip_empty: bool = False,
) -> List[str]:
    """Read a file line by line with error handling.

    Args:
        filepath: Path to file.
        encoding: Text encoding.
        max_lines: Stop after reading this many lines (None = no limit).
        skip_empty: Omit lines that are empty or whitespace-only.

    Returns:
        List of lines without trailing newlines (empty list on error or
        when the file does not exist).
    """
    filepath = Path(filepath)

    if not filepath.exists():
        return []

    result: List[str] = []
    try:
        with open(filepath, encoding=encoding) as handle:
            for index, raw in enumerate(handle):
                # Honor the line cap before processing the next line.
                if max_lines is not None and index >= max_lines:
                    break
                stripped = raw.rstrip("\n")
                if skip_empty and not stripped.strip():
                    continue
                result.append(stripped)
    except (OSError, UnicodeDecodeError) as err:
        logger.error(f"Error reading lines from {filepath}: {err}")
        return []
    return result
225
+
226
+
227
def safe_read_json(
    filepath: Union[str, Path],
    default: Any = None,
    encoding: str = "utf-8",
) -> Any:
    """Read and parse a JSON file, returning *default* on any failure.

    Args:
        filepath: Path to JSON file.
        default: Value returned when the file is missing or not valid JSON.
        encoding: Text encoding.

    Returns:
        Parsed JSON data or default value.
    """
    raw = safe_read(filepath, encoding=encoding, default=None)
    if raw is None:
        return default

    try:
        return json.loads(raw)
    except json.JSONDecodeError as err:
        logger.error(f"Invalid JSON in {filepath}: {err}")
        return default
252
+
253
+
254
def safe_read_yaml(
    filepath: Union[str, Path],
    default: Any = None,
    encoding: str = "utf-8",
) -> Any:
    """Read and parse a YAML file, returning *default* on any failure.

    Uses yaml.safe_load, so untrusted input cannot construct arbitrary
    Python objects.

    Args:
        filepath: Path to YAML file.
        default: Value returned when the file is missing or not valid YAML.
        encoding: Text encoding.

    Returns:
        Parsed YAML data or default value.
    """
    raw = safe_read(filepath, encoding=encoding, default=None)
    if raw is None:
        return default

    try:
        return yaml.safe_load(raw)
    except yaml.YAMLError as err:
        logger.error(f"Invalid YAML in {filepath}: {err}")
        return default
279
+
280
+
281
+ # ==============================================================================
282
+ # FILE WRITING OPERATIONS
283
+ # ==============================================================================
284
+
285
+
286
def safe_write(
    filepath: Union[str, Path],
    content: Union[str, bytes],
    mode: str = "w",
    encoding: str = "utf-8",
    create_dirs: bool = True,
    backup: bool = False,
) -> bool:
    """Write *content* to a file, reporting success as a boolean.

    Args:
        filepath: Path to file.
        content: Content to write.
        mode: Write mode ('w' for text, 'wb' for binary).
        encoding: Text encoding (ignored for binary mode).
        create_dirs: Create parent directories if they don't exist.
        backup: Copy any existing file to "<name>.backup" first.

    Returns:
        True if successful, False otherwise.
    """
    target = Path(filepath)

    if create_dirs:
        ensure_parent_directory(target)

    # Best-effort backup: a failed copy is logged but does not abort the write.
    if backup and target.exists():
        backup_path = target.with_suffix(target.suffix + ".backup")
        try:
            shutil.copy2(target, backup_path)
        except OSError as err:
            logger.warning(f"Failed to create backup of {target}: {err}")

    binary = "b" in mode
    try:
        with open(target, mode, encoding=None if binary else encoding) as handle:
            handle.write(content)
        return True
    except OSError as err:
        logger.error(f"Error writing file {target}: {err}")
        return False
332
+
333
+
334
def atomic_write(
    filepath: Union[str, Path],
    content: Union[str, bytes],
    mode: str = "w",
    encoding: str = "utf-8",
) -> bool:
    """Atomically write content to a file.

    Writes to a temporary file and then moves it over the target path
    with ``os.replace``, ensuring the write is atomic (all-or-nothing).

    Args:
        filepath: Path to file.
        content: Content to write.
        mode: Write mode ('w' for text, 'wb' for binary).
        encoding: Text encoding (ignored for binary mode).

    Returns:
        True if successful, False otherwise.
    """
    filepath = Path(filepath)
    ensure_parent_directory(filepath)

    # The temporary file must live in the same directory so os.replace()
    # stays on one filesystem and is therefore atomic.
    temp_fd, temp_path = tempfile.mkstemp(
        dir=filepath.parent, prefix=f".{filepath.name}.", suffix=".tmp"
    )

    try:
        # Bug fix: the encoding argument was previously never passed to
        # os.fdopen, so text-mode writes silently used the platform
        # default encoding instead of the documented one.
        binary = "b" in mode
        with os.fdopen(temp_fd, mode, encoding=None if binary else encoding) as f:
            f.write(content)

        # Atomic rename
        os.replace(temp_path, filepath)
        return True

    except OSError as e:
        logger.error(f"Error in atomic write to {filepath}: {e}")
        # Clean up the orphaned temporary file; narrow except instead of
        # the previous bare `except:` which also swallowed KeyboardInterrupt.
        try:
            os.unlink(temp_path)
        except OSError:
            pass
        return False
382
+
383
+
384
def safe_write_json(
    filepath: Union[str, Path],
    data: Any,
    indent: int = 2,
    encoding: str = "utf-8",
    atomic: bool = False,
) -> bool:
    """Serialize *data* as JSON and write it to *filepath*.

    Args:
        filepath: Path to JSON file.
        data: Data to serialize.
        indent: JSON indentation.
        encoding: Text encoding.
        atomic: Use an atomic (all-or-nothing) write.

    Returns:
        True if successful, False otherwise.
    """
    try:
        serialized = json.dumps(data, indent=indent, ensure_ascii=False)
        writer = atomic_write if atomic else safe_write
        return writer(filepath, serialized, encoding=encoding)
    except (TypeError, ValueError) as err:
        logger.error(f"Error serializing JSON for {filepath}: {err}")
        return False
411
+
412
+
413
def safe_write_yaml(
    filepath: Union[str, Path],
    data: Any,
    encoding: str = "utf-8",
    atomic: bool = False,
) -> bool:
    """Serialize *data* as YAML and write it to *filepath*.

    Args:
        filepath: Path to YAML file.
        data: Data to serialize.
        encoding: Text encoding.
        atomic: Use an atomic (all-or-nothing) write.

    Returns:
        True if successful, False otherwise.
    """
    try:
        serialized = yaml.safe_dump(
            data, default_flow_style=False, allow_unicode=True
        )
        writer = atomic_write if atomic else safe_write
        return writer(filepath, serialized, encoding=encoding)
    except yaml.YAMLError as err:
        logger.error(f"Error serializing YAML for {filepath}: {err}")
        return False
438
+
439
+
440
def append_to_file(
    filepath: Union[str, Path],
    content: str,
    encoding: str = "utf-8",
    create: bool = True,
) -> bool:
    """Append text to a file, optionally creating it first.

    Args:
        filepath: Path to file.
        content: Content to append.
        encoding: Text encoding.
        create: Create the file if it doesn't exist.

    Returns:
        True if successful, False otherwise (including when the file is
        missing and *create* is False).
    """
    target = Path(filepath)

    # Refuse to create a new file unless explicitly allowed.
    if not (create or target.exists()):
        return False

    return safe_write(target, content, mode="a", encoding=encoding)
463
+
464
+
465
+ # ==============================================================================
466
+ # FILE OPERATIONS
467
+ # ==============================================================================
468
+
469
+
470
def safe_copy(
    source: Union[str, Path],
    destination: Union[str, Path],
    overwrite: bool = False,
    preserve_metadata: bool = True,
) -> bool:
    """Copy a file with guard checks and error handling.

    Args:
        source: Source file path.
        destination: Destination file path.
        overwrite: Overwrite if destination exists.
        preserve_metadata: Also copy timestamps/permissions (copy2).

    Returns:
        True if successful, False otherwise.
    """
    source = Path(source)
    destination = Path(destination)

    if not source.exists():
        logger.error(f"Source file not found: {source}")
        return False

    if destination.exists() and not overwrite:
        logger.warning(f"Destination already exists: {destination}")
        return False

    # copy2 preserves metadata; plain copy transfers only the contents.
    copier = shutil.copy2 if preserve_metadata else shutil.copy
    try:
        ensure_parent_directory(destination)
        copier(source, destination)
        return True
    except OSError as err:
        logger.error(f"Error copying {source} to {destination}: {err}")
        return False
510
+
511
+
512
def safe_move(
    source: Union[str, Path],
    destination: Union[str, Path],
    overwrite: bool = False,
) -> bool:
    """Move a file with guard checks and error handling.

    Args:
        source: Source file path.
        destination: Destination file path.
        overwrite: Overwrite if destination exists.

    Returns:
        True if successful, False otherwise.
    """
    source = Path(source)
    destination = Path(destination)

    if not source.exists():
        logger.error(f"Source file not found: {source}")
        return False

    if destination.exists() and not overwrite:
        logger.warning(f"Destination already exists: {destination}")
        return False

    try:
        ensure_parent_directory(destination)
        # shutil.move handles cross-filesystem moves (copy + delete).
        shutil.move(str(source), str(destination))
    except OSError as err:
        logger.error(f"Error moving {source} to {destination}: {err}")
        return False
    return True
545
+
546
+
547
def safe_delete(
    filepath: Union[str, Path],
    missing_ok: bool = True,
) -> bool:
    """Delete a file or directory tree with error handling.

    Args:
        filepath: Path to delete (directories are removed recursively).
        missing_ok: Treat a missing path as success.

    Returns:
        True if deleted (or absent and *missing_ok*), False on error.
    """
    target = Path(filepath)

    if not target.exists():
        return missing_ok

    try:
        if target.is_dir():
            # Directories are removed with their entire contents.
            shutil.rmtree(target)
        else:
            target.unlink()
    except OSError as err:
        logger.error(f"Error deleting {target}: {err}")
        return False
    return True
574
+
575
+
576
def safe_rename(
    old_path: Union[str, Path],
    new_path: Union[str, Path],
    overwrite: bool = False,
) -> bool:
    """Rename a file or directory with guard checks.

    Args:
        old_path: Current path.
        new_path: New path.
        overwrite: Overwrite if new path exists.

    Returns:
        True if successful, False otherwise.
    """
    src = Path(old_path)
    dst = Path(new_path)

    if not src.exists():
        logger.error(f"Source path not found: {src}")
        return False

    if dst.exists() and not overwrite:
        logger.warning(f"Destination already exists: {dst}")
        return False

    try:
        src.rename(dst)
    except OSError as err:
        logger.error(f"Error renaming {src} to {dst}: {err}")
        return False
    return True
608
+
609
+
610
+ # ==============================================================================
611
+ # FILE LOCKING
612
+ # ==============================================================================
613
+
614
+
615
@contextmanager
def file_lock(filepath: Union[str, Path], timeout: float = 5.0):
    """Context manager for exclusive advisory file locking.

    Acquires an exclusive ``flock`` on a companion "<name>.lock" file,
    retrying in 0.1 s steps until *timeout* elapses.

    Args:
        filepath: Path whose companion lock file is used.
        timeout: Maximum time in seconds to wait for the lock.

    Yields:
        Open handle to the lock file, holding the exclusive lock.

    Raises:
        TimeoutError: If the lock cannot be acquired within *timeout*.
    """
    import time

    filepath = Path(filepath)
    ensure_parent_directory(filepath)

    lock_file = filepath.with_suffix(filepath.suffix + ".lock")
    lock_handle = None

    try:
        # Bug fix: the previous implementation reopened the lock file on
        # every retry without closing the prior handle, leaking one file
        # descriptor per 0.1 s of contention. Open once, retry only flock.
        lock_handle = open(lock_file, "w")

        # monotonic() is immune to wall-clock adjustments, unlike time().
        deadline = time.monotonic() + timeout
        while True:
            try:
                fcntl.flock(lock_handle, fcntl.LOCK_EX | fcntl.LOCK_NB)
                break
            except OSError as e:
                if e.errno != errno.EAGAIN:
                    raise
                if time.monotonic() > deadline:
                    raise TimeoutError(f"Could not acquire lock for {filepath}")
                time.sleep(0.1)

        yield lock_handle

    finally:
        if lock_handle:
            # Best-effort release and cleanup; narrow except instead of
            # the previous bare `except:`.
            try:
                fcntl.flock(lock_handle, fcntl.LOCK_UN)
                lock_handle.close()
                lock_file.unlink(missing_ok=True)
            except OSError:
                pass
659
+
660
+
661
+ # ==============================================================================
662
+ # FILE VALIDATION
663
+ # ==============================================================================
664
+
665
+
666
def validate_file(
    filepath: Union[str, Path],
    must_exist: bool = True,
    min_size: Optional[int] = None,
    max_size: Optional[int] = None,
    extensions: Optional[List[str]] = None,
) -> bool:
    """Check that a file satisfies the requested constraints.

    Args:
        filepath: Path to file.
        must_exist: File must exist.
        min_size: Minimum file size in bytes.
        max_size: Maximum file size in bytes.
        extensions: Allowed file extensions (with leading dot).

    Returns:
        True if every requested check passes, False otherwise.
    """
    candidate = Path(filepath)

    # A missing file is acceptable only when existence is optional.
    if not candidate.exists():
        return not must_exist

    # Directories and other non-regular entries never validate.
    if not candidate.is_file():
        return False

    size = candidate.stat().st_size
    if min_size is not None and size < min_size:
        return False
    if max_size is not None and size > max_size:
        return False

    return not extensions or candidate.suffix in extensions
710
+
711
+
712
def get_file_hash(
    filepath: Union[str, Path],
    algorithm: str = "sha256",
) -> Optional[str]:
    """Compute the hex digest of a file's contents.

    Args:
        filepath: Path to file.
        algorithm: Hash algorithm name accepted by hashlib (sha256, md5, ...).

    Returns:
        Hex digest string, or None if the file is missing or unreadable.
    """
    target = Path(filepath)

    if not target.exists():
        return None

    try:
        digest = hashlib.new(algorithm)
        # Stream in 8 KiB chunks so large files never load fully in memory.
        with open(target, "rb") as handle:
            while chunk := handle.read(8192):
                digest.update(chunk)
        return digest.hexdigest()
    except OSError as err:
        logger.error(f"Error hashing {target}: {err}")
        return None
739
+
740
+
741
def find_files(
    directory: Union[str, Path],
    pattern: str = "*",
    recursive: bool = True,
    file_only: bool = True,
) -> List[Path]:
    """Collect paths under *directory* matching a glob pattern.

    Args:
        directory: Directory to search.
        pattern: Glob pattern.
        recursive: Search subdirectories as well.
        file_only: Return only regular files (exclude directories).

    Returns:
        List of matching paths (empty if the directory does not exist).
    """
    root = Path(directory)

    if not root.exists():
        return []

    matches = root.rglob(pattern) if recursive else root.glob(pattern)

    if file_only:
        return [match for match in matches if match.is_file()]
    return list(matches)